From a73ba11dbf3978093b62e6257fec055adbf0fd2e Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Thu, 13 Jun 2024 17:59:56 -0400
Subject: [PATCH 1/7] Add formatting step to CI

---
 .github/workflows/main.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index ae2a0528..db1e8e78 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -61,6 +61,11 @@ jobs:
           echo "package.json not found, skipping npm commands."
         fi

+      - name: Check formatting
+        run: |
+          npm run prettier:check
+          black . --check --exclude=node_modules
+
       - name: Run frontend tests
         run: |
           npm run vitest

From f007fcb8ea884370391f3ef259a5a0b31d324b3a Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Thu, 13 Jun 2024 18:02:22 -0400
Subject: [PATCH 2/7] Format backend with black

---
 arches_lingo/celery.py                       |   6 +-
 arches_lingo/etl_modules/migrate_to_lingo.py | 171 ++++++++++++++-----
 arches_lingo/migrations/0001_initial.py      |   8 +-
 arches_lingo/search_indexes/sample_index.py  |  14 +-
 arches_lingo/tasks.py                        |   3 +-
 arches_lingo/wsgi.py                         |   9 +-
 manage.py                                    |   4 +-
 tests/base_test.py                           |   4 +-
 tests/search_indexes/sample_index_tests.py   |  21 ++-
 tests/test_settings.py                       |  21 ++-
 10 files changed, 184 insertions(+), 77 deletions(-)

diff --git a/arches_lingo/celery.py b/arches_lingo/celery.py
index bdd18349..8ca893b7 100644
--- a/arches_lingo/celery.py
+++ b/arches_lingo/celery.py
@@ -7,7 +7,7 @@
 if platform.system().lower() == "windows":
     os.environ.setdefault("FORKED_BY_MULTIPROCESSING", "1")

-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'arches_lingo.settings')
-app = Celery('arches_lingo')
-app.config_from_object('django.conf:settings', namespace='CELERY')
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "arches_lingo.settings")
+app = Celery("arches_lingo")
+app.config_from_object("django.conf:settings", namespace="CELERY")
 app.autodiscover_tasks()
diff --git a/arches_lingo/etl_modules/migrate_to_lingo.py b/arches_lingo/etl_modules/migrate_to_lingo.py
index d9e26149..c938f213 100644
--- a/arches_lingo/etl_modules/migrate_to_lingo.py
+++ b/arches_lingo/etl_modules/migrate_to_lingo.py
@@ -7,7 +7,10 @@
 from arches.app.datatypes.datatypes import DataTypeFactory
 from arches.app.etl_modules.save import save_to_tiles
 from arches.app.etl_modules.decorators import load_data_async
-from arches.app.etl_modules.base_import_module import BaseImportModule, FileValidationError
+from arches.app.etl_modules.base_import_module import (
+    BaseImportModule,
+    FileValidationError,
+)
 from arches.app.models import models
 from arches.app.models.concept import Concept
 from arches.app.models.models import LoadStaging, NodeGroup, LoadEvent
@@ -47,17 +50,24 @@ def __init__(self, request=None, loadid=None):
         self.moduleid = request.POST.get("module") if request else None
         self.loadid = request.POST.get("loadid") if request else loadid
         self.datatype_factory = DataTypeFactory()
-
+
     def etl_schemes(self, cursor, nodegroup_lookup, node_lookup):
         schemes = []
-        for concept in models.Concept.objects.filter(nodetype="ConceptScheme").prefetch_related("value_set"):
+        for concept in models.Concept.objects.filter(
+            nodetype="ConceptScheme"
+        ).prefetch_related("value_set"):
             scheme_to_load = {"type": "Scheme", "tile_data": []}
             for value in concept.value_set.all():
-                scheme_to_load["resourceinstanceid"] = concept.pk  # use old conceptid as new resourceinstanceid
+                scheme_to_load["resourceinstanceid"] = (
+                    concept.pk
+                )  # use old conceptid as new resourceinstanceid
                 name = {}
                 identifier = {}
-                if value.valuetype_id == "prefLabel" or value.valuetype_id == "altLabel":
+                if (
+                    value.valuetype_id == "prefLabel"
+                    or value.valuetype_id == "altLabel"
+                ):
                     name["name_content"] = value.value
                     name["name_language"] = value.language_id
                     name["name_type"] = value.valuetype_id
@@ -67,18 +77,25 @@ def etl_schemes(self, cursor, nodegroup_lookup, node_lookup):
                     identifier["identifier_type"] = value.valuetype_id
                     scheme_to_load["tile_data"].append({"identifier": identifier})
             schemes.append(scheme_to_load)
-        self.populate_staging_table(cursor, schemes, nodegroup_lookup, node_lookup)
+        self.populate_staging_table(cursor, schemes, nodegroup_lookup, node_lookup)

     def etl_concepts(self, cursor, nodegroup_lookup, node_lookup):
         concepts = []
-        for concept in models.Concept.objects.filter(nodetype="Concept").prefetch_related("value_set"):
+        for concept in models.Concept.objects.filter(
+            nodetype="Concept"
+        ).prefetch_related("value_set"):
             concept_to_load = {"type": "Concept", "tile_data": []}
             for value in concept.value_set.all():
-                concept_to_load["resourceinstanceid"] = concept.pk  # use old conceptid as new resourceinstanceid
+                concept_to_load["resourceinstanceid"] = (
+                    concept.pk
+                )  # use old conceptid as new resourceinstanceid
                 name = {}
                 identifier = {}
-                if value.valuetype_id == "prefLabel" or value.valuetype_id == "altLabel":
+                if (
+                    value.valuetype_id == "prefLabel"
+                    or value.valuetype_id == "altLabel"
+                ):
                     name["name_content"] = value.value
                     name["name_language"] = value.language_id
                     name["name_type"] = value.valuetype_id
@@ -90,8 +107,9 @@ def etl_concepts(self, cursor, nodegroup_lookup, node_lookup):
             concepts.append(concept_to_load)
         self.populate_staging_table(cursor, concepts, nodegroup_lookup, node_lookup)

-
-    def populate_staging_table(self, cursor, concepts_to_load, nodegroup_lookup, node_lookup):
+    def populate_staging_table(
+        self, cursor, concepts_to_load, nodegroup_lookup, node_lookup
+    ):
         tiles_to_load = []
         for concept_to_load in concepts_to_load:
             for mock_tile in concept_to_load["tile_data"]:
@@ -100,23 +118,32 @@ def populate_staging_table(self, cursor, concepts_to_load, nod
                 nodegroup_depth = nodegroup_lookup[nodegroup_id]["depth"]
                 tile_id = uuid.uuid4()
                 parent_tile_id = None
-                tile_value_json, passes_validation = self.create_tile_value(cursor, mock_tile, nodegroup_alias, nodegroup_lookup, node_lookup)
+                tile_value_json, passes_validation = self.create_tile_value(
+                    cursor, mock_tile, nodegroup_alias, nodegroup_lookup, node_lookup
+                )
                 operation = "insert"
-                tiles_to_load.append(LoadStaging(
-                    load_event=LoadEvent(self.loadid),
-                    nodegroup=NodeGroup(nodegroup_id),
-                    resourceid=concept_to_load["resourceinstanceid"],
-                    tileid=tile_id,
-                    parenttileid=parent_tile_id,
-                    value=tile_value_json,
-                    nodegroup_depth=nodegroup_depth,
-                    source_description="{0}: {1}".format(concept_to_load["type"], nodegroup_alias),  # source_description
-                    passes_validation=passes_validation,
-                    operation=operation,
-                ))
+                tiles_to_load.append(
+                    LoadStaging(
+                        load_event=LoadEvent(self.loadid),
+                        nodegroup=NodeGroup(nodegroup_id),
+                        resourceid=concept_to_load["resourceinstanceid"],
+                        tileid=tile_id,
+                        parenttileid=parent_tile_id,
+                        value=tile_value_json,
+                        nodegroup_depth=nodegroup_depth,
+                        source_description="{0}: {1}".format(
+                            concept_to_load["type"], nodegroup_alias
+                        ),  # source_description
+                        passes_validation=passes_validation,
+                        operation=operation,
+                    )
+                )
         staged_tiles = LoadStaging.objects.bulk_create(tiles_to_load)
-
-        cursor.execute("""CALL __arches_check_tile_cardinality_violation_for_load(%s)""", [self.loadid])
+
+        cursor.execute(
+            """CALL __arches_check_tile_cardinality_violation_for_load(%s)""",
+            [self.loadid],
+        )
         cursor.execute(
             """
             INSERT INTO load_errors (type, source, error, loadid, nodegroupid)
@@ -127,7 +154,9 @@ def populate_staging_table(self, cursor, concepts_to_load, nod
             [self.loadid],
         )

-    def create_tile_value(self, cursor, mock_tile, nodegroup_alias, nodegroup_lookup, node_lookup):
+    def create_tile_value(
+        self, cursor, mock_tile, nodegroup_alias, nodegroup_lookup, node_lookup
+    ):
         tile_value = {}
         tile_valid = True
         for node_alias in mock_tile[nodegroup_alias].keys():
@@ -140,8 +169,10 @@ def create_tile_value(self, cursor, mock_tile, nodegroup_alias, nodegroup_looku
                 config = node_details["config"]
                 config["loadid"] = self.loadid
                 config["nodeid"] = nodeid
-
-                value, validation_errors = self.prepare_data_for_loading(datatype_instance, source_value, config)
+
+                value, validation_errors = self.prepare_data_for_loading(
+                    datatype_instance, source_value, config
+                )
                 valid = True if len(validation_errors) == 0 else False
                 if not valid:
                     tile_valid = False
@@ -150,18 +181,34 @@ def create_tile_value(self, cursor, mock_tile, nodegroup_alias, nodegroup_looku
                         error_message = error["message"]
                         cursor.execute(
                             """INSERT INTO load_errors (type, value, source, error, message, datatype, loadid, nodeid) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)""",
-                            ("node", source_value, "", error["title"], error["message"], datatype, self.loadid, nodeid),
+                            (
+                                "node",
+                                source_value,
+                                "",
+                                error["title"],
+                                error["message"],
+                                datatype,
+                                self.loadid,
+                                nodeid,
+                            ),
                         )
-
-                tile_value[nodeid] = {"value": value, "valid": valid, "source": source_value, "notes": error_message, "datatype": datatype}
+
+                tile_value[nodeid] = {
+                    "value": value,
+                    "valid": valid,
+                    "source": source_value,
+                    "notes": error_message,
+                    "datatype": datatype,
+                }
             except KeyError:
-                pass
+                pass
         return tile_value, tile_valid
-
+
     def init_relationships(self, cursor, loadid):
         # Create top concept of scheme relationships (derived from relations with 'hasTopConcept' relationtype)
-        cursor.execute("""
+        cursor.execute(
+            """
             insert into load_staging(
                 value,
                 resourceid,
@@ -193,10 +240,17 @@ def init_relationships(self, cursor, loadid):
                 'insert' as operation
             from relations
             where relationtype = 'hasTopConcept';
-        """, (CONCEPTS_TOP_CONCEPT_OF_NODEGROUP_ID, loadid, CONCEPTS_TOP_CONCEPT_OF_NODEGROUP_ID))
+        """,
+            (
+                CONCEPTS_TOP_CONCEPT_OF_NODEGROUP_ID,
+                loadid,
+                CONCEPTS_TOP_CONCEPT_OF_NODEGROUP_ID,
+            ),
+        )

         # Create broader relationships (derived from relations with 'narrower' relationtype)
-        cursor.execute("""
+        cursor.execute(
+            """
             insert into load_staging(
                 value,
                 resourceid,
@@ -228,11 +282,14 @@ def init_relationships(self, cursor, loadid):
                 'insert' as operation
             from relations
             where relationtype = 'narrower';
-        """, (CONCEPTS_BROADER_NODEGROUP_ID, loadid, CONCEPTS_BROADER_NODEGROUP_ID))
+        """,
+            (CONCEPTS_BROADER_NODEGROUP_ID, loadid, CONCEPTS_BROADER_NODEGROUP_ID),
+        )

         # Create Part of Scheme relationships - derived by recursively generating concept hierarchy & associating
         # concepts with their schemes
-        cursor.execute("""
+        cursor.execute(
+            """
             insert into load_staging(
                 value,
                 resourceid,
@@ -274,14 +331,28 @@ def init_relationships(self, cursor, loadid):
                 %s::uuid as nodegroupid,
                 'insert' as operation
             FROM concept_hierarchy;
-        """, (CONCEPTS_PART_OF_SCHEME_NODEGROUP_ID, loadid, CONCEPTS_PART_OF_SCHEME_NODEGROUP_ID))
+        """,
+            (
+                CONCEPTS_PART_OF_SCHEME_NODEGROUP_ID,
+                loadid,
+                CONCEPTS_PART_OF_SCHEME_NODEGROUP_ID,
+            ),
+        )

     def start(self, request):
         load_details = {"operation": "RDM to Lingo Migration"}
         cursor = connection.cursor()
         cursor.execute(
             """INSERT INTO load_event (loadid, complete, status, etl_module_id, load_details, load_start_time, user_id) VALUES (%s, %s, %s, %s, %s, %s, %s)""",
-            (self.loadid, False, "running", self.moduleid, json.dumps(load_details), datetime.now(), self.userid),
+            (
+                self.loadid,
+                False,
+                "running",
+                self.moduleid,
+                json.dumps(load_details),
+                datetime.now(),
+                self.userid,
+            ),
         )
         message = "load event created"
         return {"success": True, "data": message}
@@ -290,22 +361,26 @@ def write(self, request):
         self.loadid = request.POST.get("loadid")
         if models.Concept.objects.count() < 500:
             response = self.run_load_task(self.userid, self.loadid)
-        else:
+        else:
             response = self.run_load_task_async(request, self.loadid)
         message = "Schemes and Concept Migration to Lingo Models Complete"
         return {"success": True, "data": message}

     def run_load_task(self, userid, loadid):
         self.loadid = loadid  # currently redundant, but be certain
-
+
         with connection.cursor() as cursor:
             # Gather and load schemes and concepts
-            schemes_nodegroup_lookup, schemes_nodes = self.get_graph_tree(SCHEMES_GRAPH_ID)
+            schemes_nodegroup_lookup, schemes_nodes = self.get_graph_tree(
+                SCHEMES_GRAPH_ID
+            )
             schemes_node_lookup = self.get_node_lookup(schemes_nodes)
             self.etl_schemes(cursor, schemes_nodegroup_lookup, schemes_node_lookup)

-            concepts_nodegroup_lookup, concepts_nodes = self.get_graph_tree(CONCEPTS_GRAPH_ID)
+            concepts_nodegroup_lookup, concepts_nodes = self.get_graph_tree(
+                CONCEPTS_GRAPH_ID
+            )
             concepts_node_lookup = self.get_node_lookup(concepts_nodes)
             self.etl_concepts(cursor, concepts_nodegroup_lookup, concepts_node_lookup)
@@ -320,11 +395,13 @@ def run_load_task(self, userid, loadid):
                 ("validated", loadid),
             )
             response = save_to_tiles(userid, loadid)
-            cursor.execute("""CALL __arches_update_resource_x_resource_with_graphids();""")
+            cursor.execute(
+                """CALL __arches_update_resource_x_resource_with_graphids();"""
+            )
             cursor.execute("""SELECT __arches_refresh_spatial_views();""")
             refresh_successful = cursor.fetchone()[0]
             if not refresh_successful:
-                raise Exception('Unable to refresh spatial views')
+                raise Exception("Unable to refresh spatial views")
             return response
         else:
             cursor.execute(
diff --git a/arches_lingo/migrations/0001_initial.py b/arches_lingo/migrations/0001_initial.py
index 0e387196..987ebe6f 100644
--- a/arches_lingo/migrations/0001_initial.py
+++ b/arches_lingo/migrations/0001_initial.py
@@ -23,15 +23,17 @@ def add_plugins(apps, schema_editor):
             componentname="reference-data-manager",
             config={},
             slug="reference-data-manager",
-            sortorder=0
+            sortorder=0,
         )

     def remove_plugin(apps, schema_editor):
         Plugin = apps.get_model("models", "Plugin")

-        for plugin in Plugin.objects.filter(pluginid__in=["29321ce0-bd95-4357-a2a5-822e9cb06f70"]):
+        for plugin in Plugin.objects.filter(
+            pluginid__in=["29321ce0-bd95-4357-a2a5-822e9cb06f70"]
+        ):
             plugin.delete()

     operations = [
         migrations.RunPython(add_plugins, remove_plugin),
-    ]
\ No newline at end of file
+    ]
diff --git a/arches_lingo/search_indexes/sample_index.py b/arches_lingo/search_indexes/sample_index.py
index 3c231556..cd9c1e83 100644
--- a/arches_lingo/search_indexes/sample_index.py
+++ b/arches_lingo/search_indexes/sample_index.py
@@ -3,8 +3,18 @@

 class SampleIndex(BaseIndex):
     def prepare_index(self):
-        self.index_metadata = {"mappings": {"properties": {"tile_count": {"type": "keyword"}, "graph_id": {"type": "keyword"}}}}
+        self.index_metadata = {
+            "mappings": {
+                "properties": {
+                    "tile_count": {"type": "keyword"},
+                    "graph_id": {"type": "keyword"},
+                }
+            }
+        }
         super(SampleIndex, self).prepare_index()

     def get_documents_to_index(self, resourceinstance, tiles):
-        return ({"tile_count": len(tiles), "graph_id": resourceinstance.graph_id}, str(resourceinstance.resourceinstanceid))
+        return (
+            {"tile_count": len(tiles), "graph_id": resourceinstance.graph_id},
+            str(resourceinstance.resourceinstanceid),
+        )
diff --git a/arches_lingo/tasks.py b/arches_lingo/tasks.py
index 0a506ae9..104cd9ae 100644
--- a/arches_lingo/tasks.py
+++ b/arches_lingo/tasks.py
@@ -3,9 +3,10 @@
 from django.contrib.auth.models import User
 from django.utils.translation import gettext as _
 from arches.app.models import models
-from arches_lingo.etl_modules import migrate_to_lingo 
+from arches_lingo.etl_modules import migrate_to_lingo
 from arches.app.tasks import notify_completion

+
 @shared_task
 def migrate_rdm_to_lingo_task(userid, loadid):
     logger = logging.getLogger(__name__)
diff --git a/arches_lingo/wsgi.py b/arches_lingo/wsgi.py
index 48a44ba3..fea57740 100644
--- a/arches_lingo/wsgi.py
+++ b/arches_lingo/wsgi.py
@@ -1,4 +1,4 @@
-'''
+"""
 ARCHES - a program developed to inventory and manage immovable cultural heritage.
 Copyright (C) 2013 J. Paul Getty Trust and World Monuments Fund

@@ -14,11 +14,12 @@
 You should have received a copy of the GNU Affero General Public License
 along with this program. If not, see .
-'''
+"""

 import os
 import sys
 import inspect
+

 path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))

 if path not in sys.path:
@@ -27,10 +28,12 @@
 # reverting back to the old style of setting the DJANGO_SETTINGS_MODULE env variable
 # refer to the following blog post under the heading "Leaking of process environment variables."
 # http://blog.dscpl.com.au/2012/10/requests-running-in-wrong-django.html
-os.environ['DJANGO_SETTINGS_MODULE'] = "arches_lingo.settings"
+os.environ["DJANGO_SETTINGS_MODULE"] = "arches_lingo.settings"

 from django.core.wsgi import get_wsgi_application
+
 application = get_wsgi_application()

 from arches.app.models.system_settings import settings
+
 settings.update_from_db()
diff --git a/manage.py b/manage.py
index aa50c1a6..0f11e261 100644
--- a/manage.py
+++ b/manage.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python

-'''
+"""
 ARCHES - a program developed to inventory and manage immovable cultural heritage.
 Copyright (C) 2013 J. Paul Getty Trust and World Monuments Fund

@@ -16,7 +16,7 @@
 You should have received a copy of the GNU Affero General Public License
 along with this program. If not, see .

-'''
+"""

 import os
diff --git a/tests/base_test.py b/tests/base_test.py
index 41a3273c..a0168aa0 100644
--- a/tests/base_test.py
+++ b/tests/base_test.py
@@ -43,7 +43,9 @@
         token, expires, scope, application_id, user_id, created, updated)
     VALUES ('{token}', '1-1-2068', 'read write', 44, {user_id}, '1-1-2018', '1-1-2018');
     """
-DELETE_TOKEN_SQL = "DELETE FROM public.oauth2_provider_accesstoken WHERE application_id = 44;"
+DELETE_TOKEN_SQL = (
+    "DELETE FROM public.oauth2_provider_accesstoken WHERE application_id = 44;"
+)


 class ArchesTestRunner(DiscoverRunner):
diff --git a/tests/search_indexes/sample_index_tests.py b/tests/search_indexes/sample_index_tests.py
index 721c1f01..aca30235 100644
--- a/tests/search_indexes/sample_index_tests.py
+++ b/tests/search_indexes/sample_index_tests.py
@@ -4,21 +4,34 @@
 from arches_lingo.search_indexes.sample_index import SampleIndex
 from django.test import TestCase

+
 class TestSampleIndex(TestCase):
     def test_prepare_index(self):
         sample_index = SampleIndex(index_name="Sample Index")
         sample_index.prepare_index()
-        expected_index_metadata = {"mappings": {"properties": {"tile_count": {"type": "keyword"}, "graph_id": {"type": "keyword"}}}}
+        expected_index_metadata = {
+            "mappings": {
+                "properties": {
+                    "tile_count": {"type": "keyword"},
+                    "graph_id": {"type": "keyword"},
+                }
+            }
+        }
         self.assertEqual(sample_index.index_metadata, expected_index_metadata)

     def test_get_documents_to_index(self):
         sample_index = SampleIndex(index_name="Sample Index")
-
+
         mock_resourceinstance = Mock(graph_id="test_graph_id")
         mock_tiles = [Mock(), Mock(), Mock()]  # Mock tiles list

-        documents, doc_id = sample_index.get_documents_to_index(mock_resourceinstance, mock_tiles)
+        documents, doc_id = sample_index.get_documents_to_index(
+            mock_resourceinstance, mock_tiles
+        )

-        self.assertEqual(documents, {"tile_count": len(mock_tiles), "graph_id": mock_resourceinstance.graph_id})
+        self.assertEqual(
+            documents,
+            {"tile_count": len(mock_tiles), "graph_id": mock_resourceinstance.graph_id},
+        )
         self.assertEqual(doc_id, str(mock_resourceinstance.resourceinstanceid))
diff --git a/tests/test_settings.py b/tests/test_settings.py
index d1b487ec..9693188e 100644
--- a/tests/test_settings.py
+++ b/tests/test_settings.py
@@ -26,12 +26,12 @@
     pass

 PACKAGE_NAME = "arches_lingo"
-APP_NAME = 'arches_lingo'
+APP_NAME = "arches_lingo"
 APP_ROOT = os.path.dirname(__file__)

 TEST_ROOT = os.path.normpath(os.path.join(ROOT_DIR, "..", "tests"))

-ROOT_URLCONF = 'arches_lingo.urls'
+ROOT_URLCONF = "arches_lingo.urls"

 ARCHES_APPLICATIONS = ()

@@ -67,14 +67,9 @@
         "PASSWORD": "postgis",
         "PORT": "5432",
         "POSTGIS_TEMPLATE": "template_postgis",
-        "TEST": {
-            "CHARSET": None,
-            "COLLATION": None,
-            "MIRROR": None,
-            "NAME": None
-        },
+        "TEST": {"CHARSET": None, "COLLATION": None, "MIRROR": None, "NAME": None},
         "TIME_ZONE": None,
-        "USER": "postgres"
+        "USER": "postgres",
     }
 }

@@ -93,7 +88,9 @@
 ELASTICSEARCH_PREFIX = "test"

 TEST_RUNNER = "tests.base_test.ArchesTestRunner"
-SILENCED_SYSTEM_CHECKS.append("arches.W001")  # Cache backend does not support rate-limiting
+SILENCED_SYSTEM_CHECKS.append(
+    "arches.W001"
+)  # Cache backend does not support rate-limiting

 # could add Chrome, PhantomJS etc... here
 LOCAL_BROWSERS = []  # ['Firefox']
@@ -107,7 +104,9 @@
 FORCE_TWO_FACTOR_AUTHENTICATION = False

 DATATYPE_LOCATIONS.append("tests.fixtures.datatypes")
-ELASTICSEARCH_HOSTS = [{"scheme": "http", "host": "localhost", "port": ELASTICSEARCH_HTTP_PORT}]
+ELASTICSEARCH_HOSTS = [
+    {"scheme": "http", "host": "localhost", "port": ELASTICSEARCH_HTTP_PORT}
+]
 LANGUAGES = [
     ("de", _("German")),
     ("en", _("English")),

From d078b6c74f2008d2517b054c6a50f758103b9319 Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Thu, 13 Jun 2024 18:03:55 -0400
Subject: [PATCH 3/7] Remove cruft from base_test.py

---
 tests/base_test.py | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/tests/base_test.py b/tests/base_test.py
index a0168aa0..c02b342c 100644
--- a/tests/base_test.py
+++ b/tests/base_test.py
@@ -35,18 +35,6 @@
 # these tests can be run from the command line via
 # python manage.py test tests --pattern="*.py" --settings="tests.test_settings"

-OAUTH_CLIENT_ID = "AAac4uRQSqybRiO6hu7sHT50C4wmDp9fAmsPlCj9"
-OAUTH_CLIENT_SECRET = "7fos0s7qIhFqUmalDI1QiiYj0rAtEdVMY4hYQDQjOxltbRCBW3dIydOeMD4MytDM9ogCPiYFiMBW6o6ye5bMh5dkeU7pg1cH86wF6B\
-    ap9Ke2aaAZaeMPejzafPSj96ID"
-CREATE_TOKEN_SQL = """
-    INSERT INTO public.oauth2_provider_accesstoken(
-        token, expires, scope, application_id, user_id, created, updated)
-    VALUES ('{token}', '1-1-2068', 'read write', 44, {user_id}, '1-1-2018', '1-1-2018');
-    """
-DELETE_TOKEN_SQL = (
-    "DELETE FROM public.oauth2_provider_accesstoken WHERE application_id = 44;"
-)
-

 class ArchesTestRunner(DiscoverRunner):
     def __init__(self, *args, **kwargs) -> None:

From d8d2c8272e501891b08a3ca8848944ba5dfd6c34 Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Thu, 13 Jun 2024 18:12:14 -0400
Subject: [PATCH 4/7] Format frontend

---
 .editorconfig                                | 2 ++
 arches_lingo/src/App.vue                     | 2 +-
 arches_lingo/src/components/FooComponent.vue | 4 ++--
 arches_lingo/src/declarations.d.ts           | 9 ++++-----
 4 files changed, 9 insertions(+), 8 deletions(-)
 create mode 100644 .editorconfig

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..930c4915
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,2 @@
+[*]
+indent_size = 4
diff --git a/arches_lingo/src/App.vue b/arches_lingo/src/App.vue
index 6a77d520..d46924ff 100644
--- a/arches_lingo/src/App.vue
+++ b/arches_lingo/src/App.vue
@@ -4,4 +4,4 @@ import Foo from "@/components/FooComponent.vue";
\ No newline at end of file
+
diff --git a/arches_lingo/src/components/FooComponent.vue b/arches_lingo/src/components/FooComponent.vue
index cf8bd83d..f4bf0a75 100644
--- a/arches_lingo/src/components/FooComponent.vue
+++ b/arches_lingo/src/components/FooComponent.vue
@@ -1,7 +1,7 @@
\ No newline at end of file
+
diff --git a/arches_lingo/src/declarations.d.ts b/arches_lingo/src/declarations.d.ts
index d9f47135..e03bbaa8 100644
--- a/arches_lingo/src/declarations.d.ts
+++ b/arches_lingo/src/declarations.d.ts
@@ -1,11 +1,10 @@
 // import declarations from other projects or Arches core
-import('../../node_modules/arches/arches/app/src/declarations.d.ts');
+import("../../node_modules/arches/arches/app/src/declarations.d.ts");

 // declare untyped modules that have been added to your project in `package.json`
 // Module homepage on npmjs.com uses logos "TS" or "DT" to indicate if typed
-declare module 'arches';
+declare module "arches";

 // declare filetypes used in `./src/` folder
-declare module '*.ts';
-declare module '*.vue';
-
+declare module "*.ts";
+declare module "*.vue";

From 62d73b7f673bf0d668fabc8d4d4ffe0506dd1791 Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Thu, 13 Jun 2024 18:15:32 -0400
Subject: [PATCH 5/7] Use prettierrc instead

---
 .editorconfig | 2 --
 .prettierrc   | 3 ++-
 2 files changed, 2 insertions(+), 3 deletions(-)
 delete mode 100644 .editorconfig

diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index 930c4915..00000000
--- a/.editorconfig
+++ /dev/null
@@ -1,2 +0,0 @@
-[*]
-indent_size = 4
diff --git a/.prettierrc b/.prettierrc
index c4d8ac54..3baef1c7 100644
--- a/.prettierrc
+++ b/.prettierrc
@@ -1,3 +1,4 @@
 {
-    "singleAttributePerLine": true
+    "singleAttributePerLine": true,
+    "tabWidth": 4
 }

From e931d4a6938537c63c348410375572b2b48b5e52 Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Fri, 14 Jun 2024 15:25:50 -0400
Subject: [PATCH 6/7] Update dev requirements

---
 arches_lingo/install/requirements_dev.txt | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/arches_lingo/install/requirements_dev.txt b/arches_lingo/install/requirements_dev.txt
index 5583a9a5..add8a555 100644
--- a/arches_lingo/install/requirements_dev.txt
+++ b/arches_lingo/install/requirements_dev.txt
@@ -2,4 +2,6 @@ livereload
 sst
 coverage
 sauceclient
-django-silk==5.1.0
\ No newline at end of file
+django-silk==5.1.0
+pre-commit
+black==24.4.2

From 6098b0592ea3641c131a1f28c59a0fdc29b9ac54 Mon Sep 17 00:00:00 2001
From: Jacob Walls
Date: Fri, 14 Jun 2024 15:26:21 -0400
Subject: [PATCH 7/7] Remove sauceclient

---
 arches_lingo/install/requirements_dev.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/arches_lingo/install/requirements_dev.txt b/arches_lingo/install/requirements_dev.txt
index add8a555..80981ab4 100644
--- a/arches_lingo/install/requirements_dev.txt
+++ b/arches_lingo/install/requirements_dev.txt
@@ -1,7 +1,6 @@
 livereload
 sst
 coverage
-sauceclient
 django-silk==5.1.0
 pre-commit
 black==24.4.2