From 93dbcf86657ac81963d0b3f03b310dc763ea90b3 Mon Sep 17 00:00:00 2001
From: Emerson Rocha
Date: Tue, 29 Jun 2021 21:19:36 -0300
Subject: [PATCH] hdp-toolchain v0.8.8.0 started (added hxltmcli v0.8.1)

refs
- hxltm-cli-xliff (#19)
- hxltm-cli-tmx (#20)
- HXL-CPLP/Auxilium-Humanitarium-API#16
- HXL-CPLP/Auxilium-Humanitarium-API#13
- HXL-CPLP/forum#58
---
 README.md                   |  39 ++++++
 bin/{hxltm-cli => hxltmcli} | 251 +++++++++++++++++------------------
 hxlm/core/bin/hxltmcli.py   |   1 +
 setup.cfg                   | 253 ++++++++++++++++++------------------
 4 files changed, 286 insertions(+), 258 deletions(-)
 rename bin/{hxltm-cli => hxltmcli} (80%)
 create mode 120000 hxlm/core/bin/hxltmcli.py

diff --git a/README.md b/README.md
index f0b8b2b..b3ba8be 100644
--- a/README.md
+++ b/README.md
@@ -32,6 +32,11 @@ exported from HXL (The Humanitarian Exchange Language)**
     - [1.4.1 HDP conventions (The YAML/JSON file structure)](#141-hdp-conventions-the-yamljson-file-structure)
     - [1.4.2 `hdpcli` (command line interface)](#142-hdpcli-command-line-interface)
     - [1.4.3 `HXLm.HDP` (python library subpackage) usage](#143-hxlmhdp-python-library-subpackage-usage)
+  - [1.5 `HXLTM` HXL Trānslātiōnem Memoriam](#15-hxltm-hxl-trānslātiōnem-memoriam)
+    - [1.5.1 TMX](#151-tmx)
+    - [1.5.2 XLIFF](#152-xliff)
+    - [1.5.3 CSV, reference format (multilingual)](#153-csv-reference-format-multilingual)
+    - [1.5.4 CSV, source + target format (bilingual)](#154-csv-source--target-format-bilingual)
 - [2. Reasons behind](#2-reasons-behind)
   - [2.1 Why?](#21-why)
   - [2.2 How?](#22-how)
@@ -409,6 +414,40 @@ python3 -m pip install hdp-toolchain
 
 - https://drive.google.com/drive/u/1/folders/1Zs-hw6y2ZHMgYXjGY1QbhrXn2UmheUEO
 
+#### 1.5 `HXLTM` HXL Trānslātiōnem Memoriam
+> This is an early draft.
+
+- Standard: **Translation Memory eXchange (TMX) v1.4b**
+  - https://www.gala-global.org/lisa-oscar-standards
+  - https://en.wikipedia.org/wiki/Translation_Memory_eXchange
+  - Examples of usage
+    - https://cloud.google.com/translate/automl/docs/prepare
+    - https://mymemory.translated.net/doc/from-empty-tm.php
+    - https://site.matecat.com/faq/translation-memory/
+- Issues:
+  - **HXL-CPLP/forum/issues/**
+    - [**_HXL-CPLP/forum/issues/58: Convenção de tags HXL em conjunto de dados para armazenar Memória de Tradução (eng: HXL translation memory TM) \#58_**](https://github.com/HXL-CPLP/forum/issues/58)
+  - **HXL-CPLP/Auxilium-Humanitarium-API**
+    - **[HXL-CPLP/Auxilium-Humanitarium-API: [Hapi versão Alpha] Fluxo de trabalho de de traduções até geração do Hapi (do website, dos schemas e das OpenAPI)](https://github.com/HXL-CPLP/Auxilium-Humanitarium-API/issues/13)**
+    - **[HXL-CPLP/Auxilium-Humanitarium-API: [MVP] Exportar de formato "HXL TM" (eng: HXL translation memory) para um ou mais formatos já usados por softwares de localização](https://github.com/HXL-CPLP/Auxilium-Humanitarium-API/issues/16)**
+  - **EticaAI/HXL-Data-Science-file-formats**
+    - _**hxltm2xliff: HXL Trānslātiōnem Memoriam -> XLIFF Version 2.1 #19**_
+- Test projects
+  - https://github.com/UNMigration/HTCDS
+  - https://docs.google.com/spreadsheets/d/1ih3ouvx_n8W5ntNcYBqoyZ2NRMdaA0LRg5F9mGriZm4/edit#gid=1292720422
+
+##### 1.5.1 TMX
+> TODO: document minimal usage
+
+##### 1.5.2 XLIFF
+> TODO: document minimal usage
+
+##### 1.5.3 CSV, reference format (multilingual)
+> TODO: document minimal usage
+
+##### 1.5.4 CSV, source + target format (bilingual)
+> TODO: document minimal usage
+
 ### 2. Reasons behind
 
 #### 2.1 Why?
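The four `1.5.x` subsections above are still `TODO: document minimal usage`. As a rough sketch of what minimal usage looks like at this stage, the test invocations shipped in `bin/hxltmcli` (later in this same patch) can be adapted; the `schemam-un-htcds*.tm.hxl.csv` sample files and the `--fontem-linguam` / `--archivum-extensionem` options are taken from that test section, and the exact behaviour may still change while this is an early draft:

```bash
# Sketch only: assumes hdp-toolchain with hxltmcli is installed and that the
# sample HXL TM files referenced in the test section of bin/hxltmcli exist.

# HXL TM (CSV) -> TMX (Translation Memory eXchange v1.4b)
hxltmcli _hxltm/schemam-un-htcds-5items.tm.hxl.csv \
    _hxltm/schemam-un-htcds-5items.tmx \
    --fontem-linguam=eng-Latn --archivum-extensionem=.tmx

# HXL TM (CSV) -> XLIFF v2.1, as in the USAGE header of bin/hxltmcli
hxltmcli un-htcds.tm.hxl.csv un-htcds.xliff
```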
diff --git a/bin/hxltm-cli b/bin/hxltmcli similarity index 80% rename from bin/hxltm-cli rename to bin/hxltmcli index 6763aa0..c47261e 100755 --- a/bin/hxltm-cli +++ b/bin/hxltmcli @@ -1,19 +1,21 @@ #!/usr/bin/env python3 # ============================================================================== # -# FILE: hxltm-cli.py +# FILE: hxltmcli.py # -# USAGE: hxltm-cli schemam-un-htcds.tm.hxl.csv schemam-un-htcds.xliff -# cat schemam-un-htcds.tm.hxl.csv | hxltm-cli > schemam-un-htcds.xliff +# USAGE: hxltmcli un-htcds.tm.hxl.csv un-htcds.xliff +# cat un-htcds.tm.hxl.csv | hxltmcli > un-htcds.xliff # -# DESCRIPTION: _[eng-Latn] hxltm-cli is an working draft of a tool to +# DESCRIPTION: _[eng-Latn] hxltmcli is an working draft of a tool to # convert prototype of translation memory stored # with HXL to XLIFF v2.1 # [eng-Latn]_ -# @see http://docs.oasis-open.org/xliff/xliff-core/v2.1/os/xliff-core-v2.1-os.html +# @see http://docs.oasis-open.org/xliff/xliff-core/v2.1 +# /os/xliff-core-v2.1-os.html # @see https://www.gala-global.org/lisa-oscar-standards # @see https://github.com/HXL-CPLP/forum/issues/58 -# @see https://github.com/HXL-CPLP/Auxilium-Humanitarium-API/issues/16 +# @see https://github.com/HXL-CPLP/Auxilium-Humanitarium-API +# /issues/16 # # OPTIONS: --- # @@ -27,25 +29,30 @@ # SPDX-License-Identifier: Unlicense # VERSION: v0.8.1 # CREATED: 2021-06-27 19:50 UTC v0.5, de github.com/EticaAI -# /HXL-Data-Science-file-formats/blob/main/bin/hxl2example +# /HXL-Data-Science-file-formats/blob/main/bin/hxl2example # REVISION: 2021-06-27 21:16 UTC v0.6 de hxl2tab # REVISION: 2021-06-27 23:53 UTC v0.7 --archivum-extensionem=.csv # 2021-06-29 22:29 UTC v0.8 MVP of --archivum-extensionem=.tmx -# Translation Memory eXchange format (TMX). -# 2021-06-29 23:16 UTC v0.8.1 hxltm2xliff renamed to hxltm-cli; +# Translation Memory eXchange format (TMX). 
+# 2021-06-29 23:16 UTC v0.8.1 hxltm2xliff renamed to hxltmcli; # Moved from github.com/HXL-CPLP/Auxilium-Humanitarium-API # to github.com/EticaAI/HXL-Data-Science-file-formats # ============================================================================== # Tests -# Exemplos: https://github.com/oasis-tcs/xliff-xliff-22/blob/master/xliff-21/test-suite/core/valid/allExtensions.xlf -# ./_systema/programma/hxltm-cli.py --help -# ./_systema/programma/hxltm-cli.py _hxltm/schemam-un-htcds.tm.hxl.csv -# ./_systema/programma/hxltm-cli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv -# ./_systema/programma/hxltm-cli.py _hxltm/schemam-un-htcds.tm.hxl.csv --fontem-linguam=eng-Latn -# ./_systema/programma/hxltm-cli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv --fontem-linguam=eng-Latn --archivum-extensionem=.tmx -# ./_systema/programma/hxltm-cli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv _hxltm/schemam-un-htcds-5items.tmx --fontem-linguam=eng-Latn --archivum-extensionem=.tmx -# python3 -m doctest ./_systema/programma/hxltm-cli.py +# Exemplos: https://github.com/oasis-tcs/xliff-xliff-22/blob/master/xliff-21 +# /test-suite/core/valid/allExtensions.xlf +# ./_systema/programma/hxltmcli.py --help +# ./_systema/programma/hxltmcli.py _hxltm/schemam-un-htcds.tm.hxl.csv +# ./_systema/programma/hxltmcli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv +# ./_systema/programma/hxltmcli.py _hxltm/schemam-un-htcds.tm.hxl.csv \ +# --fontem-linguam=eng-Latn +# ./_systema/programma/hxltmcli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv \ +# --fontem-linguam=eng-Latn --archivum-extensionem=.tmx +# ./_systema/programma/hxltmcli.py _hxltm/schemam-un-htcds-5items.tm.hxl.csv \ +# _hxltm/schemam-un-htcds-5items.tmx --fontem-linguam=eng-Latn \ +# --archivum-extensionem=.tmx +# python3 -m doctest ./_systema/programma/hxltmcli.py __VERSION__ = "v0.8.1" @@ -54,6 +61,9 @@ import os import logging import argparse +import csv +import tempfile + # @see https://github.com/HXLStandard/libhxl-python # pip3 install libhxl --upgrade # Do not import hxl, to avoid circular imports @@ -61,15 +71,14 @@ import hxl.converters import hxl.filters import hxl.io -import csv -import tempfile # In Python2, sys.stdin is a byte stream; in Python3, it's a text stream STDIN = sys.stdin.buffer + class HXLTMCLI: """ - _[eng-Latn] hxltm-cli is an working draft of a tool to + _[eng-Latn] hxltmcli is an working draft of a tool to convert prototype of translation memory stored with HXL to XLIFF v2.1 [eng-Latn]_ @@ -97,13 +106,13 @@ class HXLTMCLI: self.hxlhelper = HXLUtils() parser = self.hxlhelper.make_args( # description=(""" - # _[eng-Latn] hxltm-cli is an working draft of a tool to - # convert prototype of translation memory stored with HXL to - # XLIFF v2.1 + # _[eng-Latn] hxltmcli is an working draft of a tool to + # convert prototype of translation memory stored with + # HXL to XLIFF v2.1 # [eng-Latn]_ # """) description=( - "_[eng-Latn] hxltm-cli " + __VERSION__ + " " + + "_[eng-Latn] hxltmcli " + __VERSION__ + " " + "is an working draft of a tool to " + "convert prototype of translation memory stored with HXL to " + "XLIFF v2.1 [eng-Latn]_" @@ -303,11 +312,6 @@ class HXLTMCLI: @see http://xml.coverpages.org/tmxSpec971212.html """ - # example of test file https://github.com/rmeertens/tmxtools/blob/master/tests/testfiles/test1.tmx - # TODO: implement something like internal structure of a object, - # maybe csv.DictReader? 
- # @see https://www.geeksforgeeks.org/convert-csv-to-json-using-python/ - # @see https://docs.python.org/3/library/csv.html#csv.DictReader datum = [] with open(hxlated_input, 'r') as csv_file: @@ -326,15 +330,15 @@ class HXLTMCLI: datum.append(HXLTMUtil.tmx_item_relevan_options(item)) - # @examplum https://cloud.google.com/translate/automl/docs/prepare#translation_memory_exchange_tmx - # @examplum https://www.gala-global.org/knowledge-center/industry-development/standards/lisa-oscar-standards resultatum = [] resultatum.append("") resultatum.append('') resultatum.append('') - # @see https://www.gala-global.org/sites/default/files/migrated-pages/docs/tmx14%20%281%29.dtd + # @see https://www.gala-global.org/sites/default/files/migrated-pages + # /docs/tmx14%20%281%29.dtd resultatum.append( - '
') # TODO: make source and adminlang configurable @@ -344,22 +348,19 @@ class HXLTMCLI: for rem in datum: num += 1 - # unit_id = rem['#x_xliff+unit+id'] if rem.has_key('#x_xliff+unit+id') else num - # print(type (rem)) - # print(rem) unit_id = rem['#item+id'] if '#item+id' in rem else num resultatum.append(' ') if '#item+wikidata+code' in rem and rem['#item+wikidata+code']: resultatum.append( - ' ' + rem['#item+wikidata+code'] + '') + ' ' + + rem['#item+wikidata+code'] + '') if '#meta+item+url+list' in rem and rem['#meta+item+url+list']: resultatum.append( # TODO: improve naming - ' ' + rem['#meta+item+url+list'] + '') - - # TODO: reduzir repetitividade; os valores estao hardcoded. Não ideal. + ' ' + \ + rem['#meta+item+url+list'] + '') hattrsl = HXLTMUtil.hxllangattrs_list_from_item(rem) # print(hattrsl) @@ -373,22 +374,6 @@ class HXLTMCLI: ' ' + rem['#item' + langattrs] + '') resultatum.append(' ') - # if '#item+i_la+i_lat+is_latn' in rem: - # resultatum.append(' ') - # resultatum.append( - # ' ' + rem['#item+i_la+i_lat+is_latn'] + '') - - # if '#item+i_pt+i_por+is_latn' in rem: - # resultatum.append(' ') - # resultatum.append( - # ' ' + rem['#item+i_pt+i_por+is_latn'] + '') - - # resultatum.append(' ') - # if '#item+i_en+i_eng+is_latn' in rem: - # resultatum.append(' ') - # resultatum.append( - # ' ' + rem['#item+i_en+i_eng+is_latn'] + '') - resultatum.append(' ') resultatum.append(' ') @@ -414,10 +399,6 @@ class HXLTMCLI: TODO: this is a work-in-progress at this moment, 2021-06-28 """ - # TODO: implement something like internal structure of a object, - # maybe csv.DictReader? - # @see https://www.geeksforgeeks.org/convert-csv-to-json-using-python/ - # @see https://docs.python.org/3/library/csv.html#csv.DictReader datum = [] with open(hxlated_input, 'r') as csv_file: @@ -432,15 +413,20 @@ class HXLTMCLI: resultatum = [] resultatum.append('') resultatum.append( - '') + '') resultatum.append(' ') num = 0 for rem in datum: num += 1 - # unit_id = rem['#x_xliff+unit+id'] if rem.has_key('#x_xliff+unit+id') else num - unit_id = rem['#x_xliff+unit+id'] if rem['#x_xliff+unit+id'] else num + if '#x_xliff+unit+id' in rem and rem['#x_xliff+unit+id']: + unit_id = rem['#x_xliff+unit+id'] + else: + unit_id = num + # unit_id = rem['#x_xliff+unit+id'] if rem['#x_xliff+unit+id'] \ + # else num resultatum.append(' ') resultatum.append(' ') @@ -449,7 +435,8 @@ class HXLTMCLI: if xsource: if not rem[xsource]: resultatum.append( - ' ') + ' ') print('ERROR:', unit_id, xsource) # continue else: @@ -481,34 +468,39 @@ class HXLTMCLI: new_txt.write(ln + "\n") # print (ln) - def hxltm2csv_header(self, hxlated_header, fontem_linguam, objectivum_linguam): + def hxltm2csv_header( + self, hxlated_header, fontem_linguam, objectivum_linguam): """ - _[eng-Latn] Convert the Main HXL TM file to a single or source to target - XLIFF translation pair + _[eng-Latn] Convert the Main HXL TM file to a single or source to + target XLIFF translation pair [eng-Latn]_ -#item+id -> #x_xliff+unit+id -#meta+archivum -> #x_xliff+file -#item+wikidata+code -> #x_xliff+unit+note+note_category__wikidata -#meta+wikidata+code -> #x_xliff+unit+note+note_category__wikidata -#meta+item+url+list -> #x_xliff+unit+notes+note_category__url -#item+type+lat_dominium+list -> #x_xliff+group+group_0 (We will not implement deeper levels than 0 now) +#item+id -> #x_xliff+unit+id +#meta+archivum -> #x_xliff+file +#item+wikidata+code -> #x_xliff+unit+note+note_category__wikidata +#meta+wikidata+code -> #x_xliff+unit+note+note_category__wikidata 
+#meta+item+url+list -> #x_xliff+unit+notes+note_category__url +#item+type+lat_dominium+list -> #x_xliff+group+group_0 +# (We will not implement deeper levels than 0 now) [contextum: XLIFF srcLang] -#item(*)+i_ZZZ+is_ZZZZ -> #x_xliff+source+i_ZZZ+is_ZZZZ -#status(*)+i_ZZZ+is_ZZZZ+xliff -> #meta+x_xliff+segment_source+state+i_ZZZ+is_ZZZZ (XLIFF don't support) -#meta(*)+i_ZZZ+is_ZZZZ -> #x_xliff+unit+note+note_category__source -#meta(*)+i_ZZZ+is_ZZZZ+list -> #x_xliff+unit+notes+note_category__source +#item(*)+i_ZZZ+is_ZZZZ -> #x_xliff+source+i_ZZZ+is_ZZZZ +#status(*)+i_ZZZ+is_ZZZZ+xliff + -> #meta+x_xliff+segment_source+state+i_ZZZ+is_ZZZZ + (XLIFF don't support) +#meta(*)+i_ZZZ+is_ZZZZ -> #x_xliff+unit+note+note_category__source +#meta(*)+i_ZZZ+is_ZZZZ+list -> #x_xliff+unit+notes+note_category__source [contextum: XLIFF trgLang] -#item(*)+i_ZZZ+is_ZZZZ -> #x_xliff+target+i_ZZZ+is_ZZZZ -#status(*)+i_ZZZ+is_ZZZZ+xliff -> #x_xliff+segment+state+i_ZZZ+is_ZZZZ -#meta(*)+i_ZZZ+is_ZZZZ -> #x_xliff+unit+note+note_category__target -#meta(*)+i_ZZZ+is_ZZZZ+list -> #x_xliff+unit+notes+note_category__target +#item(*)+i_ZZZ+is_ZZZZ -> #x_xliff+target+i_ZZZ+is_ZZZZ +#status(*)+i_ZZZ+is_ZZZZ+xliff -> #x_xliff+segment+state+i_ZZZ+is_ZZZZ +#meta(*)+i_ZZZ+is_ZZZZ -> #x_xliff+unit+note+note_category__target +#meta(*)+i_ZZZ+is_ZZZZ+list -> #x_xliff+unit+notes+note_category__target _[eng-Latn] TODO: - Map XLIFF revisions back MateCat back to HXL TM - @see http://docs.oasis-open.org/xliff/xliff-core/v2.1/os/xliff-core-v2.1-os.html#revisions + @see http://docs.oasis-open.org/xliff/xliff-core/v2.1/os + /xliff-core-v2.1-os.html#revisions [eng-Latn]_ """ @@ -542,7 +534,8 @@ class HXLTMCLI: elif hxlated_header[idx] == '#item+wikidata+code' or \ hxlated_header[idx] == '#meta+wikidata+code': - hxlated_header[idx] = '#x_xliff+unit+note+note_category__wikidata' + hxlated_header[idx] = \ + '#x_xliff+unit+note+note_category__wikidata' continue elif hxlated_header[idx] == '#item+type+lat_dominium+list': @@ -585,16 +578,20 @@ class HXLTMCLI: if hxlated_header[idx].find(fon_ling) > -1: if hxlated_header[idx].find('+list') > -1: - hxlated_header[idx] = '#x_xliff+unit+notes+note_category__source' + hxlated_header[idx] = \ + '#x_xliff+unit+notes+note_category__source' else: - hxlated_header[idx] = '#x_xliff+unit+note+note_category__source' + hxlated_header[idx] = \ + '#x_xliff+unit+note+note_category__source' continue if hxlated_header[idx].find(obj_ling) > -1: if hxlated_header[idx].find('+list') > -1: - hxlated_header[idx] = '#x_xliff+unit+notes+note_category__target' + hxlated_header[idx] = \ + '#x_xliff+unit+notes+note_category__target' else: - hxlated_header[idx] = '#x_xliff+unit+note+note_category__target' + hxlated_header[idx] = \ + '#x_xliff+unit+note+note_category__target' continue # We will ignore other #metas @@ -604,8 +601,11 @@ class HXLTMCLI: class HXLTMUtil: + """HXL Trānslātiōnem Memoriam auxilium programmi + """ - def bcp47_from_hxlattrs(self, hashtag): + @staticmethod + def bcp47_from_hxlattrs(hashtag): """From a typical HXLTM hashtag, return only the bcp47 language code without require a complex table equivalence. @@ -629,7 +629,8 @@ class HXLTMUtil: return '' - def hxllangattrs_list_from_item(self, item): + @staticmethod + def hxllangattrs_list_from_item(item): """hxllangattrs_list_from_item get only the raw attr string part that is repeated severa times and mean the same logical group. 
@@ -662,17 +663,18 @@ class HXLTMUtil: result.add(rawstr) return result - def iso6393_from_hxlattrs(self, hashtag): - """From a typical HXLTM hashtag, return only the ISO 639-3 language code - without require a complex table equivalence. + @staticmethod + def iso6393_from_hxlattrs(hashtag): + """From a typical HXLTM hashtag, return only the ISO 639-3 language + code without require a complex table equivalence. Example: - >>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_ar+i_arb+is_arab') - 'arb' - >>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_ar') - '' - >>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_pt+i_por+is_latn+alt+list') - 'por' +>>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_ar+i_arb+is_arab') +'arb' +>>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_ar') +'' +>>> HXLTMUtil.iso6393_from_hxlattrs('#item+i_pt+i_por+is_latn+alt+list') +'por' Args: hashtag ([String]): A hashtag string @@ -693,7 +695,8 @@ class HXLTMUtil: return '' - def iso115924_from_hxlattrs(self, hashtag): + @staticmethod + def iso115924_from_hxlattrs(hashtag): """From a typical HXLTM hashtag, return only the ISO 115924 writting system without require a complex table equivalence. @@ -711,41 +714,14 @@ class HXLTMUtil: """ if hashtag: parts = hashtag.lower().split('+') - # '#item+i_ar+i_arb+is_arab' => ['#item', 'i_ar', 'i_arb', 'is_arab'] - # print(parts) for k in parts: if k.startswith('is_'): return k.replace('is_', '') return '' - def item_linguam_keys_grouped(self, item): - """From a item with several non-grouped keys, return list of grouped - results per different language - - Example: - > >> item = {'#item+i_pt+i_por+is_latn': '','#item+i_pt+i_por+is_latn+alt+list': '', '#meta+item+i_pt+i_por+is_latn': ''} - > >> HXLTMUtil.item_linguam_keys_grouped(item) - 'arab' - - Args: - hashtag ([String]): A linguam code - - Returns: - [String]: HXL Attributes - """ - # print(item) - alllangs = set() - for k in item: - iso6393 = HXLTMUtil.iso6393_from_hxlattrs(k) - if iso6393: - alllangs.add(iso6393) - - # TODO: finish item_linguam_keys_grouped. Maybe with hxl.model.TagPattern? 
- # @see https://github.com/HXLStandard/libhxl-python/blob/main/hxl/model.py#L29 - return '' - - def linguam_2_hxlattrs(self, linguam): + @staticmethod + def linguam_2_hxlattrs(linguam): """linguam_2_hxlattrs Example: @@ -763,7 +739,8 @@ class HXLTMUtil: iso6393, iso115924 = list(linguam.lower().split('-')) return '+i_' + iso6393 + '+is_' + iso115924 - def xliff_item_relevant_options(self, item): + @staticmethod + def xliff_item_relevant_options(item): """From an dict (python object) return only keys that start with #x_xliff @@ -784,7 +761,8 @@ class HXLTMUtil: return item_neo - def tmx_item_relevan_options(self, item): + @staticmethod + def tmx_item_relevan_options(item): return item # item_neo = {} @@ -797,14 +775,16 @@ class HXLTMUtil: # return item_neo - def xliff_item_xliff_source_key(self, item): + @staticmethod + def xliff_item_xliff_source_key(item): for k in item: if k.startswith('#x_xliff+source'): return k return None - def xliff_item_xliff_target_key(self, item): + @staticmethod + def xliff_item_xliff_target_key(item): for k in item: if k.startswith('#x_xliff+target'): return k @@ -1021,3 +1001,10 @@ if __name__ == "__main__": args = hxltmcli.make_args_hxltmcli() hxltmcli.execute_cli(args) + + +def exec_from_console_scripts(): + hxltmcli_ = HXLTMCLI() + args_ = hxltmcli_.make_args_hxltmcli() + + hxltmcli_.execute_cli(args_) diff --git a/hxlm/core/bin/hxltmcli.py b/hxlm/core/bin/hxltmcli.py new file mode 120000 index 0000000..e33c6d1 --- /dev/null +++ b/hxlm/core/bin/hxltmcli.py @@ -0,0 +1 @@ +../../../bin/hxltmcli \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index ac5d8ac..3420bdf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,126 +1,127 @@ -[metadata] -# Emerson Rocha notes: -# using username '-eticaai' to provide some namespace -# we're not planning to release this on pip (at least not soon, since -# exist both security implications and voluntarily void estimulate -# people to publish forks) -# -# Also, note that for > HDSL3, like HDSL4, something that on humanitarian -# context would be considered 'Strictly Confidential', so the idea of -# design an plugin system is taking in account regional usage, like -# Brazilian PPCAAM or related to comptroller/ombudsman/organized crime/etc -# be able to extend/validate. - -# NOTE: Would "hdp-toolchain" be a good umbrela name for all things the current -# package offers? 
See -# - https://pt.wikipedia.org/wiki/Conjunto_de_ferramentas_GNU -# - https://en.wikipedia.org/wiki/Toolchain -# name = hxlm-base-eticaai # New temporary name 'hdp-toolchain' on 2021-03-30 -name = hdp-toolchain -version = 0.8.7.3 -author = Emerson Rocha -author_email = rocha@ieee.org -description = HDP Declarative Programming toolchain (working draft) -long_description = file: README.md -long_description_content_type = text/markdown -url = https://github.com/EticaAI/HXL-Data-Science-file-formats -project_urls = - Bug Tracker = https://github.com/EticaAI/HXL-Data-Science-file-formats/issues -classifiers = - Programming Language :: Python :: 3 - License :: OSI Approved :: The Unlicense (Unlicense) - Operating System :: OS Independent - Topic :: Scientific/Engineering :: Artificial Intelligence - Typing :: Typed - Topic :: Utilities - -[options] - -python_requires = >=3.7 -install_requires = - libhxl - pygments - cryptography - keyring - -namespace_packages = hxlm -packages = - hxlm.core - hxlm.core._dt - hxlm.core.bin - hxlm.core.hdp - hxlm.core.hdp.util - hxlm.core.htype - hxlm.core.internal - hxlm.core.io - hxlm.core.schema - hxlm.core.urn - hxlm.data.baseline - hxlm.data.exemplum - hxlm.data.hxl - hxlm.data.udhr - hxlm.lisp - hxlm.lisp.bootstrapper - hxlm.locale - hxlm.locale.pt.LC_MESSAGES - hxlm.ontologia - hxlm.ontologia.python - hxlm.ontologia.python.hdp - hxlm.routing - hxlm.taxonomy - hxlm.plugin.xa_amnesty - hxlm.plugin.xa_gdpr - hxlm.plugin.xa_humanitarian - hxlm.plugin.xa_lgpd - hxlm.plugin.xa_rc - hxlm.plugin.xa_sus - hxlm.plugin.xl_por - hxlm.plugin.xz_eticaai - -include_package_data = True - -[options.extras_require] -hxl2tab = - hug -hxlquickimport = - hug - slugify -hxlquickmeta = - pandas - Orange3 -full = - hug - slugify - Orange3 - - -[options.package_data] -# * = *.yml, *.json, *.csv -hxlm.core.bin = *.yml -hxlm.core.schema = *.yml, *.json, *.csv -hxlm.data.baseline = *.yml, *.json, *.csv -hxlm.data.exemplum = *.yml, *.json, *.csv -hxlm.data.hxl = *.yml, *.json, *.csv -hxlm.data.udhr = *.yml, *.json, *.csv -hxlm.locale = *.pot, *.po -hxlm.locale.pt.LC_MESSAGES = *.po, *.mo -hxlm.ontologia = *.yml, *.json, .ebnf, *.md -# TODO: implement some way to compile the .mo files from .po translations - -[options.entry_points] -console_scripts = - ; urnresolver = hxlm.core.bin.urnresolver:main - ; urnresolver = hxlm.core.bin.urnresolver:urnresolver - hdpcli = hxlm.core.bin.hdpcli:exec_from_console_scripts - urnresolver = hxlm.core.bin.urnresolver:exec_from_console_scripts - urnprovider-local = hxlm.core.bin.urnprovider_local:exec_from_console_scripts - hxl2tab = hxlm.core.bin.hxl2tab:exec_from_console_scripts - hxlquickimport = hxlm.core.bin.hxlquickimport:exec_from_console_scripts - hxlquickmeta = hxlm.core.bin.hxlquickmeta:exec_from_console_scripts - -# libhxl -# pyyaml # for config files, but we could also allow json? - -# TODO: maybe? 
-# - https://github.com/Julian/jsonschema -# - https://python-jsonschema.readthedocs.io/en/stable/ +[metadata] +# Emerson Rocha notes: +# using username '-eticaai' to provide some namespace +# we're not planning to release this on pip (at least not soon, since +# exist both security implications and voluntarily void estimulate +# people to publish forks) +# +# Also, note that for > HDSL3, like HDSL4, something that on humanitarian +# context would be considered 'Strictly Confidential', so the idea of +# design an plugin system is taking in account regional usage, like +# Brazilian PPCAAM or related to comptroller/ombudsman/organized crime/etc +# be able to extend/validate. + +# NOTE: Would "hdp-toolchain" be a good umbrela name for all things the current +# package offers? See +# - https://pt.wikipedia.org/wiki/Conjunto_de_ferramentas_GNU +# - https://en.wikipedia.org/wiki/Toolchain +# name = hxlm-base-eticaai # New temporary name 'hdp-toolchain' on 2021-03-30 +name = hdp-toolchain +version = 0.8.8.0 +author = Emerson Rocha +author_email = rocha@ieee.org +description = HDP Declarative Programming toolchain (working draft) +long_description = file: README.md +long_description_content_type = text/markdown +url = https://github.com/EticaAI/HXL-Data-Science-file-formats +project_urls = + Bug Tracker = https://github.com/EticaAI/HXL-Data-Science-file-formats/issues +classifiers = + Programming Language :: Python :: 3 + License :: OSI Approved :: The Unlicense (Unlicense) + Operating System :: OS Independent + Topic :: Scientific/Engineering :: Artificial Intelligence + Typing :: Typed + Topic :: Utilities + +[options] + +python_requires = >=3.7 +install_requires = + libhxl + pygments + cryptography + keyring + +namespace_packages = hxlm +packages = + hxlm.core + hxlm.core._dt + hxlm.core.bin + hxlm.core.hdp + hxlm.core.hdp.util + hxlm.core.htype + hxlm.core.internal + hxlm.core.io + hxlm.core.schema + hxlm.core.urn + hxlm.data.baseline + hxlm.data.exemplum + hxlm.data.hxl + hxlm.data.udhr + hxlm.lisp + hxlm.lisp.bootstrapper + hxlm.locale + hxlm.locale.pt.LC_MESSAGES + hxlm.ontologia + hxlm.ontologia.python + hxlm.ontologia.python.hdp + hxlm.routing + hxlm.taxonomy + hxlm.plugin.xa_amnesty + hxlm.plugin.xa_gdpr + hxlm.plugin.xa_humanitarian + hxlm.plugin.xa_lgpd + hxlm.plugin.xa_rc + hxlm.plugin.xa_sus + hxlm.plugin.xl_por + hxlm.plugin.xz_eticaai + +include_package_data = True + +[options.extras_require] +hxl2tab = + hug +hxlquickimport = + hug + slugify +hxlquickmeta = + pandas + Orange3 +full = + hug + slugify + Orange3 + + +[options.package_data] +# * = *.yml, *.json, *.csv +hxlm.core.bin = *.yml +hxlm.core.schema = *.yml, *.json, *.csv +hxlm.data.baseline = *.yml, *.json, *.csv +hxlm.data.exemplum = *.yml, *.json, *.csv +hxlm.data.hxl = *.yml, *.json, *.csv +hxlm.data.udhr = *.yml, *.json, *.csv +hxlm.locale = *.pot, *.po +hxlm.locale.pt.LC_MESSAGES = *.po, *.mo +hxlm.ontologia = *.yml, *.json, .ebnf, *.md +# TODO: implement some way to compile the .mo files from .po translations + +[options.entry_points] +console_scripts = + ; urnresolver = hxlm.core.bin.urnresolver:main + ; urnresolver = hxlm.core.bin.urnresolver:urnresolver + hdpcli = hxlm.core.bin.hdpcli:exec_from_console_scripts + hxltmcli = hxlm.core.bin.hxltmcli:exec_from_console_scripts + urnresolver = hxlm.core.bin.urnresolver:exec_from_console_scripts + urnprovider-local = hxlm.core.bin.urnprovider_local:exec_from_console_scripts + hxl2tab = hxlm.core.bin.hxl2tab:exec_from_console_scripts + hxlquickimport = 
hxlm.core.bin.hxlquickimport:exec_from_console_scripts
+    hxlquickmeta = hxlm.core.bin.hxlquickmeta:exec_from_console_scripts
+
+# libhxl
+# pyyaml # for config files, but we could also allow json?
+
+# TODO: maybe?
+# - https://github.com/Julian/jsonschema
+# - https://python-jsonschema.readthedocs.io/en/stable/
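With the `hxltmcli = hxlm.core.bin.hxltmcli:exec_from_console_scripts` entry added to `console_scripts` above (backed by the new `hxlm/core/bin/hxltmcli.py` symlink pointing at `bin/hxltmcli`), the tool should become callable by name once the package is installed. A minimal sketch, reusing the install line from the README and the USAGE lines from `bin/hxltmcli`; the sample file name is the one used there and is only illustrative:

```bash
# Sketch only: assumes a hdp-toolchain release (>= 0.8.8.0) that already ships
# this entry point; from a local checkout, `python3 -m pip install -e .` is the
# rough equivalent.
python3 -m pip install hdp-toolchain

hxltmcli --help

# HXL TM (CSV) in via stdin, XLIFF v2.1 out, per the USAGE header of bin/hxltmcli
cat un-htcds.tm.hxl.csv | hxltmcli > un-htcds.xliff
```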