Skip to content

Commit

Permalink
fix data analysis description and tooltips hidden for large screens (m…
Browse files Browse the repository at this point in the history
  • Loading branch information
imatiach-msft authored Sep 27, 2023
1 parent cfe1037 commit 49d8c0d
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 15 deletions.
7 changes: 6 additions & 1 deletion libs/localization/src/lib/en.json
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,12 @@
"showTop": "Datapoints with the largest estimated causal responses to treatment feature: {0}",
"whatifDescription": "What-if allows you to perturb features for any input and observe how the model's prediction changes. You can perturb features manually or specify the desired prediction (e.g., class label for a classifier) to see a list of closest data points to the original input that would lead to the desired prediction. Also known as prediction counterfactuals, you can use them for exploring the relationships learnt by the model; understanding important, necessary features for the model's predictions; or debugging edge cases for the model. To start, choose input points from the data table or scatter plot."
},
"DataAnalysis": {
"TableView": {
"description": "View the dataset in a table representation with the ground truth and predicted labels. The table is separated into sub-tables of correct and incorrect predictions.",
"infoTitle": "Additional information on table view"
}
},
"ErrorAnalysis": {
"Cohort": {
"_cohort.comment": "a subset of the data is called a cohort",
Expand Down Expand Up @@ -1692,7 +1698,6 @@
"CorrectPredictions": "Correct predictions",
"GlobalExplanation": "Aggregate feature importance",
"IncorrectPredictions": "Incorrect predictions",
"InfoTitle": "Additional information on data analysis table view",
"IndividualFeatureTabular": "Select a datapoint by clicking on a datapoint (up to 5 datapoints) in the table to view their local feature importance values (local explanation) and individual conditional expectation (ICE) plots.",
"IndividualFeatureText": "Select a datapoint by clicking on a datapoint in the table to view the local feature importance values (local explanation).",
"LocalExplanation": "Individual feature importance",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ import {
IProcessedStyleSet,
getTheme
} from "@fluentui/react";
import { hideXxlUp } from "@responsible-ai/core-ui";

export interface ITabsViewStyles {
section: IStyle;
Expand Down Expand Up @@ -41,8 +40,7 @@ export const tabsViewStyles: () => IProcessedStyleSet<ITabsViewStyles> = () => {
textOverflow: "ellipsis"
},
sectionTooltip: {
display: "inline",
...hideXxlUp
display: "inline"
},
stackStyle: {
padding: "20px",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,17 +41,8 @@ export function getInfo(
body = localization.Interpret.DatasetExplorer.helperText;
title = localization.Interpret.DatasetExplorer.infoTitle;
} else if (dataAnalysisOption === DataAnalysisTabOptions.TableView) {
title = localization.ModelAssessment.FeatureImportances.InfoTitle;
let hasTextImportances = false;
if (props?.modelExplanationData) {
hasTextImportances =
!!props.modelExplanationData[0]?.precomputedExplanations
?.textFeatureImportance;
}
body = hasTextImportances
? localization.ModelAssessment.FeatureImportances.IndividualFeatureText
: localization.ModelAssessment.FeatureImportances
.IndividualFeatureTabular;
title = localization.DataAnalysis.TableView.infoTitle;
body = localization.DataAnalysis.TableView.description;
}
} else if (tabKey === GlobalTabKeys.CausalAnalysisTab) {
if (causalAnalysisOption === CausalAnalysisOptions.Aggregate) {
Expand Down

0 comments on commit 49d8c0d

Please sign in to comment.