diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..5f7e9aa --- /dev/null +++ b/.coveragerc @@ -0,0 +1,12 @@ +[run] +branch = True +omit = + azure_monitor/setup.py + azure_monitor/tests/* + +[report] +fail_under = 98 +show_missing = True +omit = + azure_monitor/setup.py + azure_monitor/tests/* diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..c7299ee --- /dev/null +++ b/.flake8 @@ -0,0 +1,18 @@ +[flake8] +ignore = + E501 # line too long, defer to black + F401 # unused import, defer to pylint + W503 # allow line breaks after binary ops, not after + E203 # allow whitespace before ':' (https://github.com/psf/black#slices) +exclude = + .bzr + .git + .hg + .svn + .tox + CVS + .venv*/ + venv*/ + target + __pycache__ + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..a61e72c --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,5 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. + +# For anything not explicitly taken by someone else: +* @hectorhdzg @lzchen diff --git a/.gitignore b/.gitignore index 894a44c..a127971 100644 --- a/.gitignore +++ b/.gitignore @@ -102,3 +102,6 @@ venv.bak/ # mypy .mypy_cache/ + +# vscode +.vscode/ diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..837d197 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,16 @@ +[settings] +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=79 + +; 3 stands for Vertical Hanging Indent, e.g. +; from third_party import ( +; lib1, +; lib2, +; lib3, +; ) +; docs: https://github.com/timothycrosley/isort#multi-line-output-modes +multi_line_output=3 +skip=target +known_third_party=opentelemetry diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..4e25a1e --- /dev/null +++ b/.pylintrc @@ -0,0 +1,489 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. 
Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS,gen + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=0 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. 
For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=missing-docstring, + fixme, # Warns about FIXME, TODO, etc. comments. + too-few-public-methods, # Might be good to re-enable this later. + too-many-instance-attributes, + too-many-arguments, + ungrouped-imports, # Leave this up to isort + wrong-import-order, # Leave this up to isort + bad-continuation, # Leave this up to black + line-too-long, # Leave this up to black + exec-used + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +# enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +#evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format=text + +# Tells whether to display a full report or only the messages. +#reports=no + +# Activate the evaluation score. 
+score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, while `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package.. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. 
A +# mixin class is detected if its name ends with "mixin" (case insensitive). +#ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +#ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +#ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. 
+allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=79 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. 
+ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=any + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=_, + log, + logger + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=yes + +# Naming style matching correct inline iteration names. 
+inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +variable-rgx=(([a-z_][a-z0-9_]{1,})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=yes + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library=six + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement. +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception". 
+overgeneral-exceptions=Exception diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..3dcf0e5 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,14 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details +version: 2 + +sphinx: + configuration: docs/conf.py + +build: + image: latest + +python: + version: 3.8 + install: + - requirements: docs-requirements.txt diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..2cfc918 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,20 @@ +dist: xenial + +language: python + +python: + - '3.4' + - '3.5' + - '3.6' + - '3.7' + - '3.8' + +install: + - pip install tox-travis + +script: + - tox + +after_success: + - pip install codecov + - codecov -v diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..ca2f99e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,13 @@ +# Changelog + +## Unreleased + +## 0.2.0 +Released 2020-03-31 + +- Initial beta release + +## 0.1.0 +Released 2019-11-06 + +- Initial alpha release diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..8eeee9c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,14 @@ + +# Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com. + +When you submit a pull request, a CLA bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. diff --git a/LICENSE b/LICENSE index fea9e74..3d8b93b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,202 +1,21 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - \ No newline at end of file + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/README.md b/README.md index b81a84e..112d3ff 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,169 @@ - -# Contributing - -This project welcomes contributions and suggestions. Most contributions require you to agree to a -Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us -the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com. - -When you submit a pull request, a CLA bot will automatically determine whether you need to provide -a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions -provided by the bot. You will only need to do this once across all repos using our CLA. - -This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). -For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or -contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
+# OpenTelemetry Azure Monitor SDKs and Exporters + +[![Gitter chat](https://img.shields.io/gitter/room/Microsoft/azure-monitor-python)](https://gitter.im/Microsoft/azure-monitor-python) +[![Build status](https://travis-ci.org/microsoft/opentelemetry-azure-monitor-python.svg?branch=master)](https://travis-ci.org/microsoft/opentelemetry-azure-monitor-python) +[![PyPI version](https://badge.fury.io/py/opentelemetry-azure-monitor.svg)](https://badge.fury.io/py/opentelemetry-azure-monitor) + +## Installation + +```sh +pip install opentelemetry-azure-monitor +``` + +## Documentation + +The online documentation is available at https://opentelemetry-azure-monitor-python.readthedocs.io/. + + +## Usage + +### Trace + +The **Azure Monitor Trace Exporter** allows you to export [OpenTelemetry](https://opentelemetry.io/) traces to [Azure Monitor](https://docs.microsoft.com/azure/azure-monitor/). + +This example shows how to send a span "hello" to Azure Monitor. + +* Create an Azure Monitor resource and get the instrumentation key, more information can be found [here](https://docs.microsoft.com/azure/azure-monitor/app/create-new-resource). +* Place your instrumentation key in a `connection string` and directly into your code. +* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``. + +```python +from azure_monitor import AzureMonitorSpanExporter +from opentelemetry import trace +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +trace.set_tracer_provider(TracerProvider()) + +# We tell OpenTelemetry who it is that is creating spans. In this case, we have +# no real name (no setup.py), so we make one up. If we had a version, we would +# also specify it here. 
+tracer = trace.get_tracer(__name__)
+
+exporter = AzureMonitorSpanExporter(
+    connection_string='InstrumentationKey=',
+)
+
+# SpanExporter receives the spans and sends them to the target location.
+span_processor = BatchExportSpanProcessor(exporter)
+trace.get_tracer_provider().add_span_processor(span_processor)
+
+with tracer.start_as_current_span('hello'):
+    print('Hello World!')
+```
+
+#### Integrations
+
+OpenTelemetry also supports several [integrations](https://github.com/open-telemetry/opentelemetry-python/tree/master/ext) which allow you to integrate with third-party libraries.
+
+This example shows how to integrate with the [requests](https://2.python-requests.org/en/master/) library.
+
+* Create an Azure Monitor resource and get the instrumentation key, more information can be found [here](https://docs.microsoft.com/azure/azure-monitor/app/create-new-resource).
+* Install the `requests` integration package using ``pip install opentelemetry-ext-http-requests``.
+* Place your instrumentation key in a `connection string` and directly into your code.
+* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``.
+ +```python +import requests + +from azure_monitor import AzureMonitorSpanExporter +from opentelemetry import trace +from opentelemetry.ext import http_requests +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +trace.set_tracer_provider(TracerProvider()) +tracer_provider = trace.get_tracer_provider() + +exporter = AzureMonitorSpanExporter( + connection_string='InstrumentationKey=', + ) +span_processor = BatchExportSpanProcessor(exporter) +tracer_provider.add_span_processor(span_processor) + +http_requests.enable(tracer_provider) +response = requests.get(url="https://azure.microsoft.com/") +``` + +#### Modifying Traces + +* You can pass a callback function to the exporter to process telemetry before it is exported. +* Your callback function can return `False` if you do not want this envelope exported. +* Your callback function must accept an [envelope](https://github.com/microsoft/opentelemetry-exporters-python/blob/master/azure_monitor/src/azure_monitor/protocol.py#L80) data type as its parameter. +* You can see the schema for Azure Monitor data types in the envelopes [here](https://github.com/microsoft/opentelemetry-exporters-python/blob/master/azure_monitor/src/azure_monitor/protocol.py). +* The `AzureMonitorSpanExporter` handles `Data` data types. 
+
+```python
+from azure_monitor import AzureMonitorSpanExporter
+from opentelemetry import trace
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchExportSpanProcessor
+
+# Callback function to add os_type: linux to span properties
+def callback_function(envelope):
+    envelope.data.base_data.properties['os_type'] = 'linux'
+    return True
+
+exporter = AzureMonitorSpanExporter(
+    connection_string='InstrumentationKey='
+)
+exporter.add_telemetry_processor(callback_function)
+
+trace.set_tracer_provider(TracerProvider())
+tracer = trace.get_tracer(__name__)
+span_processor = BatchExportSpanProcessor(exporter)
+trace.get_tracer_provider().add_span_processor(span_processor)
+
+with tracer.start_as_current_span('hello'):
+    print('Hello World!')
+```
+
+### Metrics
+
+The **Azure Monitor Metrics Exporter** allows you to export metrics to [Azure Monitor](https://docs.microsoft.com/azure/azure-monitor/).
+
+This example shows how to track a counter metric and send it as telemetry every export interval.
+
+* Create an Azure Monitor resource and get the instrumentation key, more information can be found [here](https://docs.microsoft.com/azure/azure-monitor/app/create-new-resource).
+* Place your instrumentation key in a `connection string` and directly into your code.
+* Alternatively, you can specify your `connection string` in an environment variable ``APPLICATIONINSIGHTS_CONNECTION_STRING``.
+ +```python +import time + +from azure_monitor import AzureMonitorMetricsExporter +from opentelemetry import metrics +from opentelemetry.sdk.metrics import Counter, MeterProvider +from opentelemetry.sdk.metrics.export.controller import PushController + +metrics.set_meter_provider(MeterProvider()) +meter = metrics.get_meter(__name__) +exporter = AzureMonitorMetricsExporter( + connection_string='InstrumentationKey=' +) +controller = PushController(meter, exporter, 5) + +requests_counter = meter.create_metric( + name="requests", + description="number of requests", + unit="1", + value_type=int, + metric_type=Counter, + label_keys=("environment",), +) + +testing_label_set = meter.get_label_set({"environment": "testing"}) + +requests_counter.add(25, testing_label_set) +time.sleep(100) +``` + +# References + +[Azure Monitor](https://docs.microsoft.com/azure/azure-monitor/) + +[OpenTelemetry Project](https://opentelemetry.io/) + +[OpenTelemetry Python Client](https://github.com/open-telemetry/opentelemetry-python) + +[Azure Monitor Python Gitter](https://gitter.im/Microsoft/azure-monitor-python) diff --git a/azure_monitor/README.rst b/azure_monitor/README.rst deleted file mode 100644 index 3a07f4d..0000000 --- a/azure_monitor/README.rst +++ /dev/null @@ -1,19 +0,0 @@ -OpenTelemetry Azure Monitor Exporters -===================================== - -This library is the home of the Azure Monitor Exporters which is an integration for OpenTelemetry. - -Installation ------------- - -:: - - pip install opentelemetry-azure-monitor-exporter - -References ----------- - -* `Azure Monitor `_ -* `OpenTelemetry Project `_ -* `OpenTelemetry Python Client `_ - diff --git a/azure_monitor/examples/client.py b/azure_monitor/examples/client.py deleted file mode 100644 index fa78dd9..0000000 --- a/azure_monitor/examples/client.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import requests
-
-from azure_monitor import AzureMonitorSpanExporter
-from opentelemetry import trace
-from opentelemetry.ext import http_requests
-from opentelemetry.sdk.trace import Tracer
-from opentelemetry.sdk.trace.export import BatchExportSpanProcessor
-
-trace.set_preferred_tracer_implementation(lambda T: Tracer())
-tracer = trace.tracer()
-http_requests.enable(tracer)
-span_processor = BatchExportSpanProcessor(AzureMonitorSpanExporter())
-tracer.add_span_processor(span_processor)
-
-response = requests.get(url="http://127.0.0.1:5000/")
-span_processor.shutdown()
diff --git a/azure_monitor/examples/metrics/auto_collector.py b/azure_monitor/examples/metrics/auto_collector.py
new file mode 100644
index 0000000..cd899d7
--- /dev/null
+++ b/azure_monitor/examples/metrics/auto_collector.py
@@ -0,0 +1,27 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+from opentelemetry import metrics
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export.controller import PushController
+
+from azure_monitor import AzureMonitorMetricsExporter
+from azure_monitor.sdk.auto_collection import AutoCollection
+
+metrics.set_meter_provider(MeterProvider())
+meter = metrics.get_meter(__name__)
+exporter = AzureMonitorMetricsExporter(
+    connection_string="InstrumentationKey="
+)
+controller = PushController(meter, exporter, 5)
+
+testing_label_set = meter.get_label_set({"environment": "testing"})
+
+# Automatically collect standard metrics
+auto_collection = AutoCollection(meter=meter, label_set=testing_label_set)
+
+# To configure a separate export interval specific for standard metrics
+# meter_standard = metrics.get_meter(__name__ + "_standard")
+# controller_standard = PushController(meter_standard, exporter, 30)
+# _auto_collection = AutoCollection(meter=meter_standard, label_set=testing_label_set)
+
+input("Press any key to exit...")
diff --git a/azure_monitor/examples/metrics/client.py 
b/azure_monitor/examples/metrics/client.py new file mode 100644 index 0000000..57bec03 --- /dev/null +++ b/azure_monitor/examples/metrics/client.py @@ -0,0 +1,26 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# pylint: disable=import-error +# pylint: disable=no-member +# pylint: disable=no-name-in-module +import requests +from opentelemetry import trace +from opentelemetry.ext import http_requests +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +from azure_monitor import AzureMonitorSpanExporter + +trace.set_tracer_provider(TracerProvider()) +tracer = trace.get_tracer(__name__) +http_requests.enable(trace.get_tracer_provider()) +span_processor = BatchExportSpanProcessor( + AzureMonitorSpanExporter( + connection_string="InstrumentationKey=" + ) +) +trace.get_tracer_provider().add_span_processor(span_processor) + +response = requests.get(url="http://google.com") + +input("Press any key to exit...") diff --git a/azure_monitor/examples/metrics/simple.py b/azure_monitor/examples/metrics/simple.py new file mode 100644 index 0000000..3547764 --- /dev/null +++ b/azure_monitor/examples/metrics/simple.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+from opentelemetry import metrics +from opentelemetry.sdk.metrics import Counter, MeterProvider +from opentelemetry.sdk.metrics.export.controller import PushController + +from azure_monitor import AzureMonitorMetricsExporter + +metrics.set_meter_provider(MeterProvider()) +meter = metrics.get_meter(__name__) +exporter = AzureMonitorMetricsExporter( + connection_string="InstrumentationKey=" +) +controller = PushController(meter, exporter, 5) + +requests_counter = meter.create_metric( + name="requests", + description="number of requests", + unit="1", + value_type=int, + metric_type=Counter, + label_keys=("environment",), +) + +testing_label_set = meter.get_label_set({"environment": "testing"}) + +requests_counter.add(25, testing_label_set) + +input("Press any key to exit...") diff --git a/azure_monitor/examples/server.py b/azure_monitor/examples/server.py deleted file mode 100644 index 1437e69..0000000 --- a/azure_monitor/examples/server.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import flask -import requests - -from azure_monitor import AzureMonitorSpanExporter -from opentelemetry import trace -from opentelemetry.ext import http_requests -from opentelemetry.ext.wsgi import OpenTelemetryMiddleware -from opentelemetry.sdk.trace import Tracer -from opentelemetry.sdk.trace.export import BatchExportSpanProcessor - -trace.set_preferred_tracer_implementation(lambda T: Tracer()) - -http_requests.enable(trace.tracer()) -span_processor = BatchExportSpanProcessor(AzureMonitorSpanExporter()) -trace.tracer().add_span_processor(span_processor) - -app = flask.Flask(__name__) -app.wsgi_app = OpenTelemetryMiddleware(app.wsgi_app) - - -@app.route("/") -def hello(): - with trace.tracer().start_span("parent"): - requests.get("https://www.wikipedia.org/wiki/Rabbit") - return "hello" - - -if __name__ == "__main__": - app.run(debug=True) - span_processor.shutdown() diff --git a/azure_monitor/examples/trace.py b/azure_monitor/examples/trace.py deleted file mode 100644 index 76f5507..0000000 --- a/azure_monitor/examples/trace.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-from azure_monitor import AzureMonitorSpanExporter -from opentelemetry import trace -from opentelemetry.sdk.trace import Tracer -from opentelemetry.sdk.trace.export import SimpleExportSpanProcessor - -trace.set_preferred_tracer_implementation(lambda T: Tracer()) -tracer = trace.tracer() -tracer.add_span_processor( - SimpleExportSpanProcessor(AzureMonitorSpanExporter()) -) - -with tracer.start_span("hello") as span: - print("Hello, World!") diff --git a/azure_monitor/examples/traces/README.md b/azure_monitor/examples/traces/README.md new file mode 100644 index 0000000..383bf92 --- /dev/null +++ b/azure_monitor/examples/traces/README.md @@ -0,0 +1,52 @@ + +## Installation + +```sh +$ pip install opentelemetry-azure-monitor-exporter +``` + +## Run the Applications + +### Trace + +* Update the code in trace.py to use your `INSTRUMENTATION_KEY` + +* Run the sample + +```sh +$ # from this directory +$ python trace.py +``` + +### Request + +* Update the code in request.py to use your `INSTRUMENTATION_KEY` + +* Run the sample + +```sh +$ pip install opentelemetry-ext-http-requests +$ # from this directory +$ python request.py +``` + +### Server + +* Update the code in server.py to use your `INSTRUMENTATION_KEY` + +* Run the sample + +```sh +$ pip install opentelemetry-ext-http-requests +$ pip install opentelemetry-ext-wsgi +$ # from this directory +$ python server.py +``` + +* Open http://localhost:8080/ + + +## Explore the data + +After running the applications, data would be available in [Azure]( +https://docs.microsoft.com/en-us/azure/azure-monitor/app/app-insights-overview#where-do-i-see-my-telemetry) diff --git a/azure_monitor/examples/traces/client.py b/azure_monitor/examples/traces/client.py new file mode 100644 index 0000000..37908dd --- /dev/null +++ b/azure_monitor/examples/traces/client.py @@ -0,0 +1,26 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+# pylint: disable=import-error +# pylint: disable=no-member +# pylint: disable=no-name-in-module +import requests +from opentelemetry import trace +from opentelemetry.ext import http_requests +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +from azure_monitor import AzureMonitorSpanExporter + +trace.set_tracer_provider(TracerProvider()) +tracer = trace.get_tracer(__name__) +http_requests.enable(trace.get_tracer_provider()) +span_processor = BatchExportSpanProcessor( + AzureMonitorSpanExporter( + # connection_string="InstrumentationKey=" + ) +) +trace.get_tracer_provider().add_span_processor(span_processor) + +response = requests.get(url="http://127.0.0.1:8080/") + +input("Press any key to exit...") diff --git a/azure_monitor/examples/traces/request.py b/azure_monitor/examples/traces/request.py new file mode 100644 index 0000000..5b26868 --- /dev/null +++ b/azure_monitor/examples/traces/request.py @@ -0,0 +1,28 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+# pylint: disable=import-error +# pylint: disable=no-member +# pylint: disable=no-name-in-module +import requests +from opentelemetry import trace +from opentelemetry.ext import http_requests +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import SimpleExportSpanProcessor + +from azure_monitor import AzureMonitorSpanExporter + +trace.set_tracer_provider(TracerProvider()) + +http_requests.enable(trace.get_tracer_provider()) +span_processor = SimpleExportSpanProcessor( + AzureMonitorSpanExporter( + connection_string="InstrumentationKey=" + ) +) +trace.get_tracer_provider().add_span_processor(span_processor) +tracer = trace.get_tracer(__name__) + +with tracer.start_as_current_span("parent"): + response = requests.get("https://azure.microsoft.com/", timeout=5) + +input("Press any key to exit...") diff --git a/azure_monitor/examples/traces/server.py b/azure_monitor/examples/traces/server.py new file mode 100644 index 0000000..7be59b0 --- /dev/null +++ b/azure_monitor/examples/traces/server.py @@ -0,0 +1,45 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# pylint: disable=import-error +# pylint: disable=no-member +# pylint: disable=no-name-in-module +import requests +from opentelemetry import trace +from opentelemetry.ext import http_requests +from opentelemetry.ext.wsgi import OpenTelemetryMiddleware +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +import flask +from azure_monitor import AzureMonitorSpanExporter + +# The preferred tracer implementation must be set, as the opentelemetry-api +# defines the interface with a no-op implementation. +trace.set_tracer_provider(TracerProvider()) +tracer = trace.get_tracer(__name__) + +exporter = AzureMonitorSpanExporter( + connection_string="InstrumentationKey=" +) + +# SpanExporter receives the spans and send them to the target location. 
+span_processor = BatchExportSpanProcessor(exporter) +trace.get_tracer_provider().add_span_processor(span_processor) + +# Integrations are the glue that binds the OpenTelemetry API and the +# frameworks and libraries that are used together, automatically creating +# Spans and propagating context as appropriate. +http_requests.enable(trace.get_tracer_provider()) +app = flask.Flask(__name__) +app.wsgi_app = OpenTelemetryMiddleware(app.wsgi_app) + + +@app.route("/") +def hello(): + with tracer.start_as_current_span("parent"): + requests.get("https://www.wikipedia.org/wiki/Rabbit") + return "hello" + + +if __name__ == "__main__": + app.run(host="localhost", port=8080, threaded=True) diff --git a/azure_monitor/examples/traces/trace.py b/azure_monitor/examples/traces/trace.py new file mode 100644 index 0000000..6bbce67 --- /dev/null +++ b/azure_monitor/examples/traces/trace.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +from opentelemetry import trace +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchExportSpanProcessor + +from azure_monitor import AzureMonitorSpanExporter + + +# Callback function to add os_type: linux to span properties +def callback_function(envelope): + envelope.data.base_data.properties["os_type"] = "linux" + return True + + +exporter = AzureMonitorSpanExporter( + connection_string="InstrumentationKey=" +) +exporter.add_telemetry_processor(callback_function) + +trace.set_tracer_provider(TracerProvider()) +tracer = trace.get_tracer(__name__) +span_processor = BatchExportSpanProcessor(exporter) +trace.get_tracer_provider().add_span_processor(span_processor) + +with tracer.start_as_current_span("hello"): + print("Hello, World!") + +input("Press any key to exit...") diff --git a/azure_monitor/setup.cfg b/azure_monitor/setup.cfg index 465ed0b..0157b80 100644 --- a/azure_monitor/setup.cfg +++ b/azure_monitor/setup.cfg @@ -1,19 +1,19 @@ # 
Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. [metadata] -name = opentelemetry-azure-monitor-exporter +name = opentelemetry-azure-monitor description = Azure Monitor integration for OpenTelemetry long_description = file: README.rst long_description_content_type = text/x-rst author = Microsoft author_email = appinsightssdk@microsoft.com -url = https://github.com/microsoft/opentelemetry-exporters-python +url = https://github.com/microsoft/opentelemetry-azure-monitor-python platforms = any -license = Apache-2.0 +license = MIT classifiers = - Development Status :: 3 - Alpha + Development Status :: 4 - Beta Intended Audience :: Developers - License :: OSI Approved :: Apache Software License + License :: OSI Approved :: MIT License Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 @@ -27,8 +27,10 @@ package_dir= =src packages=find_namespace: install_requires = - opentelemetry-api - opentelemetry-sdk + opentelemetry-api >= 0.5b0 + opentelemetry-sdk >= 0.5b0 + psutil >= 5.6.3 + requests ~= 2.0 [options.packages.find] where = src diff --git a/azure_monitor/setup.py b/azure_monitor/setup.py index e38bdd3..ae8edff 100644 --- a/azure_monitor/setup.py +++ b/azure_monitor/setup.py @@ -5,9 +5,7 @@ import setuptools BASE_DIR = os.path.dirname(__file__) -VERSION_FILENAME = os.path.join( - BASE_DIR, "src", "azure_monitor", "version.py" -) +VERSION_FILENAME = os.path.join(BASE_DIR, "src", "azure_monitor", "version.py") PACKAGE_INFO = {} with open(VERSION_FILENAME) as f: exec(f.read(), PACKAGE_INFO) diff --git a/azure_monitor/src/azure_monitor/__init__.py b/azure_monitor/src/azure_monitor/__init__.py index f3a6f7b..4a963e4 100644 --- a/azure_monitor/src/azure_monitor/__init__.py +++ b/azure_monitor/src/azure_monitor/__init__.py @@ -1,6 +1,6 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
-from azure_monitor.trace import AzureMonitorSpanExporter -from azure_monitor.version import __version__ # noqa +from azure_monitor.export.metrics import AzureMonitorMetricsExporter +from azure_monitor.export.trace import AzureMonitorSpanExporter -__all__ = ["AzureMonitorSpanExporter"] +__all__ = ["AzureMonitorMetricsExporter", "AzureMonitorSpanExporter"] diff --git a/azure_monitor/src/azure_monitor/export/__init__.py b/azure_monitor/src/azure_monitor/export/__init__.py new file mode 100644 index 0000000..e705a90 --- /dev/null +++ b/azure_monitor/src/azure_monitor/export/__init__.py @@ -0,0 +1,202 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import json +import logging +import typing +from enum import Enum + +import requests +from opentelemetry.sdk.metrics.export import MetricsExportResult +from opentelemetry.sdk.trace.export import SpanExportResult + +from azure_monitor.options import ExporterOptions +from azure_monitor.protocol import Envelope +from azure_monitor.storage import LocalFileStorage + +logger = logging.getLogger(__name__) + + +class ExportResult(Enum): + SUCCESS = 0 + FAILED_RETRYABLE = 1 + FAILED_NOT_RETRYABLE = 2 + + +# pylint: disable=broad-except +class BaseExporter: + """Azure Monitor base exporter for OpenTelemetry. + + Args: + options: :doc:`export.options` to allow configuration for the exporter + """ + + def __init__(self, **options): + self._telemetry_processors = [] + self.options = ExporterOptions(**options) + self.storage = LocalFileStorage( + path=self.options.storage_path, + max_size=self.options.storage_max_size, + maintenance_period=self.options.storage_maintenance_period, + retention_period=self.options.storage_retention_period, + ) + + def add_telemetry_processor( + self, processor: typing.Callable[..., any] + ) -> None: + """Adds telemetry processor to the collection. 
+ + Telemetry processors will be called one by one before telemetry + item is pushed for sending and in the order they were added. + + Args: + processor: Processor to add + """ + self._telemetry_processors.append(processor) + + def clear_telemetry_processors(self) -> None: + """Removes all telemetry processors""" + self._telemetry_processors = [] + + def _apply_telemetry_processors( + self, envelopes: typing.List[Envelope] + ) -> typing.List[Envelope]: + """Applies all telemetry processors in the order they were added. + + This function will return the list of envelopes to be exported after + each processor has been run sequentially. Individual processors can + throw exceptions and fail, but the applying of all telemetry processors + will proceed (not fast fail). Processors also return True if envelope + should be included for exporting, False otherwise. + + Args: + envelopes: The envelopes to apply each processor to. + """ + filtered_envelopes = [] + for envelope in envelopes: + accepted = True + for processor in self._telemetry_processors: + try: + if processor(envelope) is False: + accepted = False + break + except Exception as ex: + logger.warning("Telemetry processor failed with: %s.", ex) + if accepted: + filtered_envelopes.append(envelope) + return filtered_envelopes + + def _transmit_from_storage(self) -> None: + for blob in self.storage.gets(): + # give a few more seconds for blob lease operation + # to reduce the chance of race (for perf consideration) + if blob.lease(self.options.timeout + 5): + envelopes = blob.get() # TODO: handle error + result = self._transmit(envelopes) + if result == ExportResult.FAILED_RETRYABLE: + blob.lease(1) + else: + blob.delete(silent=True) + + # pylint: disable=too-many-branches + # pylint: disable=too-many-nested-blocks + def _transmit(self, envelopes: typing.List[Envelope]) -> ExportResult: + """ + Transmit the data envelopes to the ingestion service. 
+ + Returns an ExportResult, this function should never + throw an exception. + """ + if len(envelopes) > 0: + try: + response = requests.post( + url=self.options.endpoint, + data=json.dumps(envelopes), + headers={ + "Accept": "application/json", + "Content-Type": "application/json; charset=utf-8", + }, + timeout=self.options.timeout, + ) + except Exception as ex: + logger.warning("Transient client side error: %s.", ex) + return ExportResult.FAILED_RETRYABLE + + text = "N/A" + data = None + try: + text = response.text + except Exception as ex: + logger.warning("Error while reading response body %s.", ex) + else: + try: + data = json.loads(text) + except Exception: + pass + + if response.status_code == 200: + logger.info("Transmission succeeded: %s.", text) + return ExportResult.SUCCESS + if response.status_code == 206: # Partial Content + # TODO: store the unsent data + if data: + try: + resend_envelopes = [] + for error in data["errors"]: + if error["statusCode"] in ( + 429, # Too Many Requests + 500, # Internal Server Error + 503, # Service Unavailable + ): + resend_envelopes.append( + envelopes[error["index"]] + ) + else: + logger.error( + "Data drop %s: %s %s.", + error["statusCode"], + error["message"], + envelopes[error["index"]], + ) + if resend_envelopes: + self.storage.put(resend_envelopes) + except Exception as ex: + logger.error( + "Error while processing %s: %s %s.", + response.status_code, + text, + ex, + ) + return ExportResult.FAILED_NOT_RETRYABLE + # cannot parse response body, fallback to retry + + if response.status_code in ( + 206, # Partial Content + 429, # Too Many Requests + 500, # Internal Server Error + 503, # Service Unavailable + ): + return ExportResult.FAILED_RETRYABLE + + return ExportResult.FAILED_NOT_RETRYABLE + # No spans to export + return ExportResult.SUCCESS + + +def get_trace_export_result(result: ExportResult) -> SpanExportResult: + if result == ExportResult.SUCCESS: + return SpanExportResult.SUCCESS + if result == 
ExportResult.FAILED_RETRYABLE: + return SpanExportResult.FAILED_RETRYABLE + if result == ExportResult.FAILED_NOT_RETRYABLE: + return SpanExportResult.FAILED_NOT_RETRYABLE + return None + + +def get_metrics_export_result(result: ExportResult) -> MetricsExportResult: + if result == ExportResult.SUCCESS: + return MetricsExportResult.SUCCESS + if result == ExportResult.FAILED_RETRYABLE: + return MetricsExportResult.FAILED_RETRYABLE + if result == ExportResult.FAILED_NOT_RETRYABLE: + return MetricsExportResult.FAILED_NOT_RETRYABLE + return None diff --git a/azure_monitor/src/azure_monitor/export/metrics/__init__.py b/azure_monitor/src/azure_monitor/export/metrics/__init__.py new file mode 100644 index 0000000..98d7369 --- /dev/null +++ b/azure_monitor/src/azure_monitor/export/metrics/__init__.py @@ -0,0 +1,100 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import json +import logging +from typing import Sequence +from urllib.parse import urlparse + +from opentelemetry.metrics import Metric +from opentelemetry.sdk.metrics import Counter, Observer +from opentelemetry.sdk.metrics.export import ( + MetricRecord, + MetricsExporter, + MetricsExportResult, +) +from opentelemetry.sdk.util import ns_to_iso_str +from opentelemetry.util import time_ns + +from azure_monitor import protocol, utils +from azure_monitor.export import ( + BaseExporter, + ExportResult, + get_metrics_export_result, +) + +logger = logging.getLogger(__name__) + + +class AzureMonitorMetricsExporter(BaseExporter, MetricsExporter): + """Azure Monitor metrics exporter for OpenTelemetry. 
+ + Args: + options: :doc:`export.options` to allow configuration for the exporter + """ + + def export( + self, metric_records: Sequence[MetricRecord] + ) -> MetricsExportResult: + envelopes = list(map(self._metric_to_envelope, metric_records)) + envelopes = list( + map( + lambda x: x.to_dict(), + self._apply_telemetry_processors(envelopes), + ) + ) + try: + result = self._transmit(envelopes) + if result == ExportResult.FAILED_RETRYABLE: + self.storage.put(envelopes, result) + if result == ExportResult.SUCCESS: + # Try to send any cached events + self._transmit_from_storage() + return get_metrics_export_result(result) + except Exception: # pylint: disable=broad-except + logger.exception("Exception occurred while exporting the data.") + return get_metrics_export_result(ExportResult.FAILED_NOT_RETRYABLE) + + def _metric_to_envelope( + self, metric_record: MetricRecord + ) -> protocol.Envelope: + + if not metric_record: + return None + # TODO: Opentelemetry does not have last updated timestamp for observer + # type metrics yet. + _time = time_ns() + if isinstance(metric_record.metric, Metric): + _time = metric_record.metric.bind( + metric_record.label_set + ).last_update_timestamp + envelope = protocol.Envelope( + ikey=self.options.instrumentation_key, + tags=dict(utils.azure_monitor_context), + time=ns_to_iso_str(_time), + ) + envelope.name = "Microsoft.ApplicationInsights.Metric" + value = 0 + metric = metric_record.metric + if isinstance(metric, Counter): + value = metric_record.aggregator.checkpoint + elif isinstance(metric, Observer): + value = metric_record.aggregator.checkpoint.last + if not value: + value = 0 + else: + # TODO: What do measure aggregations look like in AI? 
+ logger.warning("Measure metric recorded.") + + data_point = protocol.DataPoint( + ns=metric_record.metric.description, + name=metric_record.metric.name, + value=value, + kind=protocol.DataPointType.MEASUREMENT.value, + ) + + properties = {} + for label_tuple in metric_record.label_set.labels: + properties[label_tuple[0]] = label_tuple[1] + data = protocol.MetricData(metrics=[data_point], properties=properties) + envelope.data = protocol.Data(base_data=data, base_type="MetricData") + return envelope diff --git a/azure_monitor/src/azure_monitor/export/trace/__init__.py b/azure_monitor/src/azure_monitor/export/trace/__init__.py new file mode 100644 index 0000000..805d271 --- /dev/null +++ b/azure_monitor/src/azure_monitor/export/trace/__init__.py @@ -0,0 +1,155 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import json +import logging +from typing import Sequence +from urllib.parse import urlparse + +from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult +from opentelemetry.sdk.util import ns_to_iso_str +from opentelemetry.trace import Span, SpanKind +from opentelemetry.trace.status import StatusCanonicalCode + +from azure_monitor import protocol, utils +from azure_monitor.export import ( + BaseExporter, + ExportResult, + get_trace_export_result, +) + +logger = logging.getLogger(__name__) + + +class AzureMonitorSpanExporter(BaseExporter, SpanExporter): + """Azure Monitor span exporter for OpenTelemetry. 
+ + Args: + options: :doc:`export.options` to allow configuration for the exporter + """ + + def export(self, spans: Sequence[Span]) -> SpanExportResult: + envelopes = list(map(self._span_to_envelope, spans)) + envelopes = list( + map( + lambda x: x.to_dict(), + self._apply_telemetry_processors(envelopes), + ) + ) + try: + result = self._transmit(envelopes) + if result == ExportResult.FAILED_RETRYABLE: + self.storage.put(envelopes, result) + if result == ExportResult.SUCCESS: + # Try to send any cached events + self._transmit_from_storage() + return get_trace_export_result(result) + except Exception: # pylint: disable=broad-except + logger.exception("Exception occurred while exporting the data.") + return get_trace_export_result(ExportResult.FAILED_NOT_RETRYABLE) + + # pylint: disable=too-many-statements + # pylint: disable=too-many-branches + def _span_to_envelope(self, span: Span) -> protocol.Envelope: + if not span: + return None + envelope = protocol.Envelope( + ikey=self.options.instrumentation_key, + tags=dict(utils.azure_monitor_context), + time=ns_to_iso_str(span.start_time), + ) + envelope.tags["ai.operation.id"] = "{:032x}".format( + span.context.trace_id + ) + parent = span.parent + if isinstance(parent, Span): + parent = parent.context + if parent: + envelope.tags["ai.operation.parentId"] = "{:016x}".format( + parent.span_id + ) + if span.kind in (SpanKind.CONSUMER, SpanKind.SERVER): + envelope.name = "Microsoft.ApplicationInsights.Request" + data = protocol.Request( + id="{:016x}".format(span.context.span_id), + duration=utils.ns_to_duration(span.end_time - span.start_time), + response_code=str(span.status.canonical_code.value), + success=span.status.canonical_code + == StatusCanonicalCode.OK, # Modify based off attributes or Status + properties={}, + ) + envelope.data = protocol.Data( + base_data=data, base_type="RequestData" + ) + if "http.method" in span.attributes: + data.name = span.attributes["http.method"] + if "http.route" in span.attributes: + 
data.name = data.name + " " + span.attributes["http.route"] + envelope.tags["ai.operation.name"] = data.name + data.properties["request.name"] = data.name + elif "http.path" in span.attributes: + data.properties["request.name"] = ( + data.name + " " + span.attributes["http.path"] + ) + if "http.url" in span.attributes: + data.url = span.attributes["http.url"] + data.properties["request.url"] = span.attributes["http.url"] + if "http.status_code" in span.attributes: + status_code = span.attributes["http.status_code"] + data.response_code = str(status_code) + data.success = 200 <= status_code < 400 + else: + envelope.name = "Microsoft.ApplicationInsights.RemoteDependency" + data = protocol.RemoteDependency( + name=span.name, + id="{:016x}".format(span.context.span_id), + result_code=str(span.status.canonical_code.value), + duration=utils.ns_to_duration(span.end_time - span.start_time), + success=span.status.canonical_code + == StatusCanonicalCode.OK, # Modify based off attributes or Status + properties={}, + ) + envelope.data = protocol.Data( + base_data=data, base_type="RemoteDependencyData" + ) + if span.kind in (SpanKind.CLIENT, SpanKind.PRODUCER): + if ( + "component" in span.attributes + and span.attributes["component"] == "http" + ): + data.type = "HTTP" + if "http.url" in span.attributes: + url = span.attributes["http.url"] + # data is the url + data.data = url + parse_url = urlparse(url) + # TODO: error handling, probably put scheme as well + # target matches authority (host:port) + data.target = parse_url.netloc + if "http.method" in span.attributes: + # name is METHOD/path + data.name = ( + span.attributes["http.method"] + + "/" + + parse_url.path + ) + if "http.status_code" in span.attributes: + status_code = span.attributes["http.status_code"] + data.result_code = str(status_code) + data.success = 200 <= status_code < 400 + else: # SpanKind.INTERNAL + data.type = "InProc" + data.success = True + for key in span.attributes: + # This removes redundant data 
from ApplicationInsights + if key.startswith("http."): + continue + data.properties[key] = span.attributes[key] + if span.links: + links = [] + for link in span.links: + operation_id = "{:032x}".format(link.context.trace_id) + span_id = "{:016x}".format(link.context.span_id) + links.append({"operation_Id": operation_id, "id": span_id}) + data.properties["_MS.links"] = json.dumps(links) + # TODO: tracestate, tags + return envelope diff --git a/azure_monitor/src/azure_monitor/options.py b/azure_monitor/src/azure_monitor/options.py new file mode 100644 index 0000000..a8e6c91 --- /dev/null +++ b/azure_monitor/src/azure_monitor/options.py @@ -0,0 +1,151 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import os +import re +import sys +import typing + +from azure_monitor.protocol import BaseObject + +INGESTION_ENDPOINT = "ingestionendpoint" +INSTRUMENTATION_KEY = "instrumentationkey" + +# Validate UUID format +# Specs taken from https://tools.ietf.org/html/rfc4122 +uuid_regex_pattern = re.compile( + "^[0-9a-f]{8}-" + "[0-9a-f]{4}-" + "[1-5][0-9a-f]{3}-" + "[89ab][0-9a-f]{3}-" + "[0-9a-f]{12}$" +) + + +class ExporterOptions(BaseObject): + """Options to configure Azure exporters. + + Args: + connection_string: Azure Connection String. + instrumentation_key: Azure Instrumentation Key. + storage_maintenance_period: Local storage maintenance interval in seconds. + storage_max_size: Local storage maximum size in bytes. + storage_path: Local storage file path. 
+ storage_retention_period: Local storage retention period in seconds + timeout: Request timeout in seconds + """ + + __slots__ = ( + "connection_string", + "endpoint", + "instrumentation_key", + "storage_maintenance_period", + "storage_max_size", + "storage_path", + "storage_retention_period", + "timeout", + ) + + def __init__( + self, + connection_string: str = None, + instrumentation_key: str = None, + storage_maintenance_period: int = 60, + storage_max_size: int = 100 * 1024 * 1024, + storage_path: str = None, + storage_retention_period: int = 7 * 24 * 60 * 60, + timeout: int = 10.0, # networking timeout in seconds + ) -> None: + if storage_path is None: + storage_path = os.path.join( + os.path.expanduser("~"), + ".opentelemetry", + ".azure", + os.path.basename(sys.argv[0]) or ".console", + ) + self.connection_string = connection_string + self.instrumentation_key = instrumentation_key + self.storage_maintenance_period = storage_maintenance_period + self.storage_max_size = storage_max_size + self.storage_path = storage_path + self.storage_retention_period = storage_retention_period + self.timeout = timeout + self.endpoint = "" + self._initialize() + self._validate_instrumentation_key() + + def _initialize(self) -> None: + code_cs = parse_connection_string(self.connection_string) + code_ikey = self.instrumentation_key + env_cs = parse_connection_string( + os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING") + ) + env_ikey = os.getenv("APPINSIGHTS_INSTRUMENTATIONKEY") + + # The priority of which value takes on the instrumentation key is: + # 1. Key from explicitly passed in connection string + # 2. Key from explicitly passed in instrumentation key + # 3. Key from connection string in environment variable + # 4. Key from instrumentation key in environment variable + self.instrumentation_key = ( + code_cs.get(INSTRUMENTATION_KEY) + or code_ikey + or env_cs.get(INSTRUMENTATION_KEY) + or env_ikey + ) + # The priority of the ingestion endpoint is as follows: + # 1. 
The endpoint explicitly passed in connection string + # 2. The endpoint from the connection string in environment variable + # 3. The default breeze endpoint + endpoint = ( + code_cs.get(INGESTION_ENDPOINT) + or env_cs.get(INGESTION_ENDPOINT) + or "https://dc.services.visualstudio.com" + ) + self.endpoint = endpoint + "/v2/track" + + def _validate_instrumentation_key(self) -> None: + """Validates the instrumentation key used for Azure Monitor. + An instrumentation key cannot be null or empty. An instrumentation key + is valid for Azure Monitor only if it is a valid UUID. + :param instrumentation_key: The instrumentation key to validate + """ + if not self.instrumentation_key: + raise ValueError("Instrumentation key cannot be none or empty.") + match = uuid_regex_pattern.match(self.instrumentation_key) + if not match: + raise ValueError("Invalid instrumentation key.") + + +def parse_connection_string(connection_string) -> typing.Dict: + if connection_string is None: + return {} + try: + pairs = connection_string.split(";") + result = dict(s.split("=") for s in pairs) + # Convert keys to lower-case due to case type-insensitive checking + result = {key.lower(): value for key, value in result.items()} + except Exception: + raise ValueError("Invalid connection string") + # Validate authorization + auth = result.get("authorization") + if auth is not None and auth.lower() != "ikey": + raise ValueError("Invalid authorization mechanism") + # Construct the ingestion endpoint if not passed in explicitly + if result.get(INGESTION_ENDPOINT) is None: + endpoint_suffix = "" + location_prefix = "" + suffix = result.get("endpointsuffix") + if suffix is not None: + endpoint_suffix = suffix + # Get regional information if provided + prefix = result.get("location") + if prefix is not None: + location_prefix = prefix + "." 
+ endpoint = "https://{0}dc.{1}".format( + location_prefix, endpoint_suffix + ) + result[INGESTION_ENDPOINT] = endpoint + else: + # Default to None if cannot construct + result[INGESTION_ENDPOINT] = None + return result diff --git a/azure_monitor/src/azure_monitor/protocol.py b/azure_monitor/src/azure_monitor/protocol.py index 0de20a9..9f68f16 100644 --- a/azure_monitor/src/azure_monitor/protocol.py +++ b/azure_monitor/src/azure_monitor/protocol.py @@ -1,188 +1,563 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -class BaseObject(dict): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - for key in kwargs: - self[key] = kwargs[key] + +import typing +from enum import Enum + + +class BaseObject: + __slots__ = () def __repr__(self): tmp = {} - current = self - while True: - for item in current.items(): - if item[0] not in tmp: - tmp[item[0]] = item[1] - if ( - current._default # noqa pylint: disable=protected-access - == current - ): - break - current = current._default # noqa pylint: disable=protected-access + + for key in self.__slots__: + data = getattr(self, key, None) + if isinstance(data, BaseObject): + tmp[key] = repr(data) + else: + tmp[key] = data + return repr(tmp) - def __setattr__(self, name, value): - self[name] = value - def __getattr__(self, name): - try: - return self[name] - except KeyError: - raise AttributeError( - "'{}' object has no attribute {}".format( - type(self).__name__, name - ) - ) +class Data(BaseObject): + """Data - def __getitem__(self, key): - if self._default is self: - return super().__getitem__(key) - if key in self: - return super().__getitem__(key) - return self._default[key] + Args: + base_data: Container for data item (B section). + base_type: Name of item (B section) if any. If telemetry data is + derived straight from this, this should be None. 
+ """ + __slots__ = ("base_data", "base_type") -BaseObject._default = BaseObject() # noqa pylint: disable=protected-access + def __init__(self, base_data: any = None, base_type: str = None) -> None: + self.base_data = base_data + self.base_type = base_type + def to_dict(self): + return { + "baseData": self.base_data.to_dict(), + "baseType": self.base_type, + } -class Data(BaseObject): - _default = BaseObject(baseData=None, baseType=None) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.baseData = self.baseData # noqa pylint: disable=invalid-name - self.baseType = self.baseType # noqa pylint: disable=invalid-name +class DataPointType(Enum): + MEASUREMENT = 0 + AGGREGATION = 1 class DataPoint(BaseObject): - _default = BaseObject( - ns="", - name="", - kind=None, - value=0.0, - count=None, - min=None, - max=None, - stdDev=None, + """Metric data single measurement. + + Args: + ns: Namespace of the metric + name: Name of the metric. + kind: Metric type. Single measurement or the aggregated value. + value: Single value for measurement. Sum of individual measurements for the aggregation. + count: Metric weight of the aggregated metric. Should not be set for a measurement. + min: Minimum value of the aggregated metric. Should not be set for a measurement. + max: Maximum value of the aggregated metric. Should not be set for a measurement. + std_dev: Standard deviation of the aggregated metric. Should not be set for a measurement. 
+ """ + + __slots__ = ( + "ns", + "name", + "kind", + "value", + "count", + "min", + "max", + "std_dev", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.name = self.name - self.value = self.value + def __init__( + self, + ns: str = "", + name: str = "", + kind: DataPointType = None, + value: float = 0.0, + count: float = None, + min: float = None, # pylint: disable=redefined-builtin + max: float = None, # pylint: disable=redefined-builtin + std_dev: float = None, + ) -> None: + self.ns = ns # pylint: disable=invalid-name + self.name = name + self.kind = kind + self.value = value + self.count = count + self.min = min + self.max = max + self.std_dev = std_dev + + def to_dict(self): + return { + "ns": self.ns, + "name": self.name, + "kind": self.kind, + "value": self.value, + "count": self.count, + "min": self.min, + "max": self.max, + "stdDev": self.std_dev, + } class Envelope(BaseObject): - _default = BaseObject( - ver=1, - name="", - time="", - sampleRate=None, - seq=None, - iKey=None, - flags=None, - tags=None, - data=None, + """Envelope represents a telemetry item + + Args: + ver: Envelope version. For internal use only. By assigning this the default, + it will not be serialized within the payload unless changed to a value other + than #1. + name: Type name of telemetry data item. + time: Event date time when telemetry item was created. This is the wall clock + time on the client when the event was generated. + There is no guarantee that the client's time is accurate. This field must be + formatted in UTC ISO 8601 format + sample_rate: Sampling rate used in application. This telemetry item represents + 1 / sampleRate actual telemetry items. + seq: Sequence field used to track absolute order of uploaded events. + ikey: The application's instrumentation key. + flags: Key/value collection of flags. + tags: Key/value collection of context properties. + data: Telemetry data item. 
+ """ + + __slots__ = ( + "ver", + "name", + "time", + "sample_rate", + "seq", + "ikey", + "flags", + "tags", + "data", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.name = self.name - self.time = self.time + def __init__( + self, + ver: int = 1, + name: str = "", + time: str = "", + sample_rate: int = None, + seq: str = None, + ikey: str = None, + flags: typing.Dict = None, + tags: typing.Dict = None, + data: Data = None, + ) -> None: + self.ver = ver + self.name = name + self.time = time + self.sample_rate = sample_rate + self.seq = seq + self.ikey = ikey + self.flags = flags + self.tags = tags + self.data = data + + def to_dict(self): + return { + "ver": self.ver, + "name": self.name, + "time": self.time, + "sampleRate": self.sample_rate, + "seq": self.seq, + "iKey": self.ikey, + "flags": self.flags, + "tags": self.tags, + "data": self.data.to_dict() if self.data else None, + } class Event(BaseObject): - _default = BaseObject(ver=2, name="", properties=None, measurements=None) + """Instances of Event represent structured event records that can be grouped + and searched by their properties. Event data item also creates a metric of + event count by name. + + Args: + ver: Schema version. + name: Event name. Keep it low cardinality to allow proper grouping and + useful metrics. + properties: Collection of custom properties. + measurements: Collection of custom measurements. + """ + + __slots__ = ("ver", "name", "properties", "measurements") + + def __init__( + self, + ver: int = 2, + name: str = "", + properties: typing.Dict[str, any] = None, + measurements: typing.Dict[str, int] = None, + ): + self.ver = ver + self.name = name + self.properties = properties + self.measurements = measurements + + def to_dict(self): + return { + "ver": self.ver, + "name": self.name, + "properties": self.properties, + "measurements": self.measurements, + } + + +class ExceptionDetails(BaseObject): + """Exception details of the exception in a chain. 
+ + Args: + id: In case exception is nested (outer exception contains inner one), + the id and outerId properties are used to represent the nesting. + outer_id: The value of outerId is a reference to an element in + ExceptionDetails that represents the outer exception. + type_name: Exception type name. + message: Exception message. + has_full_stack: Indicates if full exception stack is provided in the exception. + The stack may be trimmed, such as in the case of a StackOverflow exception. + stack: Text describing the stack. Either stack or parsedStack should have a + value. + parsed_stack: List of stack frames. Either stack or parsedStack should have + a value. + """ + + __slots__ = ( + "id", + "outer_id", + "type_name", + "message", + "has_full_stack", + "stack", + "parsed_stack", + ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.name = self.name + def __init__( + self, + id: int = None, # pylint: disable=redefined-builtin + outer_id: int = None, + type_name: str = None, + message: str = None, + has_full_stack: bool = None, + stack: str = None, + parsed_stack: any = None, + ) -> None: + self.id = id # pylint: disable=invalid-name + self.outer_id = outer_id + self.type_name = type_name + self.message = message + self.has_full_stack = has_full_stack + self.stack = stack + self.parsed_stack = parsed_stack + + def to_dict(self): + return { + "id": self.id, + "outerId": self.outer_id, + "typeName": self.type_name, + "message": self.message, + "hasFullStack ": self.has_full_stack, + "stack": self.stack, + "parsedStack": self.parsed_stack, + } class ExceptionData(BaseObject): - _default = BaseObject( - ver=2, - exceptions=[], - severityLevel=None, - problemId=None, - properties=None, - measurements=None, + """An instance of Exception represents a handled or unhandled exception that + occurred during execution of the monitored application. + + Args: + ver: Schema version. 
+ exceptions: Exception chain - list of inner exceptions. + severity_level: Severity level. Mostly used to indicate exception severity + level when it is reported by logging library. + problem_id: Identifier of where the exception was thrown in code. + Used for exceptions grouping. Typically a combination of exception type + and a function from the call stack. + properties: Collection of custom properties. + measurements: Collection of custom measurements. + """ + + __slots__ = ( + "ver", + "exceptions", + "severity_level", + "problem_id", + "properties", + "measurements", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.exceptions = self.exceptions + def __init__( + self, + ver: int = 2, + exceptions: typing.List[ExceptionDetails] = None, + severity_level: int = None, + problem_id: str = None, + properties: typing.Dict[str, any] = None, + measurements: typing.Dict[str, int] = None, + ) -> None: + if exceptions is None: + exceptions = [] + self.ver = ver + self.exceptions = exceptions + self.severity_level = severity_level + self.problem_id = problem_id + self.properties = properties + self.measurements = measurements + + def to_dict(self): + return { + "ver": self.ver, + "exceptions": self.exceptions, + "severityLevel": self.severity_level, + "problemId": self.problem_id, + "properties": self.properties, + "measurements": self.measurements, + } + + +class SeverityLevel(Enum): + VERBOSE = 0 + INFORMATION = 1 + WARNING = 2 + ERROR = 3 + CRITICAL = 4 class Message(BaseObject): - _default = BaseObject( - ver=2, - message="", - severityLevel=None, - properties=None, - measurements=None, + """Instances of Message represent printf-like trace statements that are + text-searched. The message does not have measurements. + + Args: + ver: Schema version. + message: Trace message. + severity_level: Trace severity level. + properties: Collection of custom properties. 
+ """ + + __slots__ = ( + "ver", + "message", + "measurements", + "severity_level", + "properties", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.message = self.message + def __init__( + self, + ver: int = 2, + message: str = "", + severity_level: SeverityLevel = None, + properties: typing.Dict[str, any] = None, + measurements: typing.Dict[str, int] = None, + ) -> None: + self.ver = ver + self.message = message + self.severity_level = severity_level + self.properties = properties + self.measurements = measurements + + def to_dict(self): + return { + "ver": self.ver, + "message": self.message, + "severityLevel": self.severity_level, + "properties": self.properties, + "measurements": self.measurements, + } class MetricData(BaseObject): - _default = BaseObject(ver=2, metrics=[], properties=None) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.metrics = self.metrics + """An instance of the Metric item is a list of measurements (single data points) + and/or aggregations. + + Args: + ver: Data base data. + metrics: List of metrics. Only one metric in the list is currently supported + by Application Insights storage. If multiple data points were sent only the + first one will be used. + properties:Collection of custom properties. 
+ """ + + __slots__ = ("ver", "metrics", "properties") + + def __init__( + self, + ver: int = 2, + metrics: typing.List[DataPoint] = None, + properties: typing.Dict[str, any] = None, + ) -> None: + if metrics is None: + metrics = [] + self.ver = ver + self.metrics = metrics + self.properties = properties + + def to_dict(self): + return { + "ver": self.ver, + "metrics": list(map(lambda x: x.to_dict(), self.metrics)), + "properties": self.properties, + } class RemoteDependency(BaseObject): - _default = BaseObject( - ver=2, - name="", - id="", - resultCode="", - duration="", - success=True, - data=None, - type=None, - target=None, - properties=None, - measurements=None, + """An instance of Remote Dependency represents an interaction of the monitored component + with a remote component/service like SQL or an HTTP endpoint. + + Args: + ver: Schema version. + name: Name of the command initiated with this dependency call. Low cardinality value. + Examples are stored procedure name and URL path template. + id: Identifier of a dependency call instance. Used for correlation with the request + telemetry item corresponding to this dependency call. + result_code: Result code of a dependency call. Examples are SQL error code and HTTP + status code. + duration: Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000 days. + success: Indication of successfull or unsuccessfull call. + data: Command initiated by this dependency call. Examples are SQL statement and HTTP + URL's with all query parameters. + type: Dependency type name. Very low cardinality value for logical grouping of + dependencies and interpretation of other fields like commandName and resultCode. + Examples are SQL, Azure table, and HTTP. + target: Target site of a dependency call. Examples are server name, host address. + properties: Collection of custom properties. + measurements: Collection of custom measurements. 
+ """ + + __slots__ = ( + "ver", + "name", + "id", + "result_code", + "duration", + "success", + "data", + "type", + "target", + "properties", + "measurements", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.name = self.name - self.resultCode = self.resultCode # noqa pylint: disable=invalid-name - self.duration = self.duration + def __init__( + self, + ver: int = 2, + name: str = "", + id: str = "", # pylint: disable=redefined-builtin + result_code: str = "", + duration: str = "", + success: bool = True, + data: Data = None, + type: str = None, # pylint: disable=redefined-builtin + target: str = None, + properties: typing.Dict[str, any] = None, + measurements: typing.Dict[str, int] = None, + ) -> None: + self.ver = ver + self.name = name + self.id = id # pylint: disable=invalid-name + self.result_code = result_code + self.duration = duration + self.success = success + self.data = data + self.type = type + self.target = target + self.properties = properties + self.measurements = measurements + + def to_dict(self): + return { + "ver": self.ver, + "name": self.name, + "id": self.id, + "resultCode": self.result_code, + "duration": self.duration, + "success": self.success, + "data": self.data, + "type": self.type, + "target": self.target, + "properties": self.properties, + "measurements": self.measurements, + } class Request(BaseObject): - _default = BaseObject( - ver=2, - id="", - duration="", - responseCode="", - success=True, - source=None, - name=None, - url=None, - properties=None, - measurements=None, + """An instance of Request represents completion of an external request to the + application to do work and contains a summary of that request execution and the + results. + + Args: + ver: Schema version. + id: Identifier of a request call instance. Used for correlation between request + and other telemetry items. + duration: Request duration in format: DD.HH:MM:SS.MMMMMM. Must be less than 1000 + days. 
+ response_code: Response code from Request + success: Indication of successfull or unsuccessfull call. + source: Source of the request. Examples are the instrumentation key of the caller + or the ip address of the caller. + name: Name of the request. Represents code path taken to process request. Low + cardinality value to allow better grouping of requests. For HTTP requests it + represents the HTTP method and URL path template like 'GET /values/{id}'. + url: Request URL with all query string parameters. + properties: Collection of custom properties. + measurements: Collection of custom measurements. + """ + + __slots__ = ( + "ver", + "id", + "duration", + "response_code", + "success", + "source", + "name", + "url", + "properties", + "measurements", ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ver = self.ver - self.id = self.id # noqa pylint: disable=invalid-name - self.duration = self.duration - self.responseCode = ( # noqa pylint: disable=invalid-name - self.responseCode - ) - self.success = self.success + def __init__( + self, + ver: int = 2, + id: str = "", # pylint: disable=redefined-builtin + duration: str = "", + response_code: str = "", + success: bool = True, + source: str = None, + name: str = None, + url: str = None, + properties: typing.Dict[str, any] = None, + measurements: typing.Dict[str, int] = None, + ) -> None: + self.ver = ver + self.id = id # pylint: disable=invalid-name + self.duration = duration + self.response_code = response_code + self.success = success + self.source = source + self.name = name + self.url = url + self.properties = properties + self.measurements = measurements + + def to_dict(self): + return { + "ver": self.ver, + "id": self.id, + "duration": self.duration, + "responseCode": self.response_code, + "success": self.success, + "source": self.source, + "name": self.name, + "url": self.url, + "properties": self.properties, + "measurements": self.measurements, + } diff --git 
a/azure_monitor/src/azure_monitor/sdk/__init__.py b/azure_monitor/src/azure_monitor/sdk/__init__.py new file mode 100644 index 0000000..4eeadf2 --- /dev/null +++ b/azure_monitor/src/azure_monitor/sdk/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +from . import auto_collection + +__all__ = ["auto_collection"] diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py new file mode 100644 index 0000000..b7ebe41 --- /dev/null +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/__init__.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# +from opentelemetry.metrics import LabelSet, Meter + +from azure_monitor.sdk.auto_collection.dependency_metrics import ( + DependencyMetrics, +) +from azure_monitor.sdk.auto_collection.performance_metrics import ( + PerformanceMetrics, +) +from azure_monitor.sdk.auto_collection.request_metrics import RequestMetrics + +__all__ = [ + "AutoCollection", + "DependencyMetrics", + "RequestMetrics", + "PerformanceMetrics", +] + + +class AutoCollection: + """Starts auto collection of standard metrics, including performance, + dependency and request metrics. 
+ + Args: + meter: OpenTelemetry Meter + label_set: OpenTelemetry label set + """ + + def __init__(self, meter: Meter, label_set: LabelSet): + self._performance_metrics = PerformanceMetrics(meter, label_set) + self._dependency_metrics = DependencyMetrics(meter, label_set) + self._request_metrics = RequestMetrics(meter, label_set) diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py new file mode 100644 index 0000000..4b86a13 --- /dev/null +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/dependency_metrics.py @@ -0,0 +1,77 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import threading +import time + +import requests +from opentelemetry import context +from opentelemetry.metrics import Meter +from opentelemetry.sdk.metrics import LabelSet + +dependency_map = dict() +_dependency_lock = threading.Lock() +ORIGINAL_REQUEST = requests.Session.request + + +def dependency_patch(*args, **kwargs) -> None: + result = ORIGINAL_REQUEST(*args, **kwargs) + # Only collect request metric if sent from non-exporter thread + if context.get_value("suppress_instrumentation") is None: + # We don't want multiple threads updating this at once + with _dependency_lock: + count = dependency_map.get("count", 0) + dependency_map["count"] = count + 1 + return result + + +class DependencyMetrics: + """Starts auto collection of dependency metrics, including + "Outgoing Requests per second" metric. 
+ + Args: + meter: OpenTelemetry Meter + label_set: OpenTelemetry label set + """ + + def __init__(self, meter: Meter, label_set: LabelSet): + self._meter = meter + self._label_set = label_set + # Patch requests + requests.Session.request = dependency_patch + meter.register_observer( + callback=self._track_dependency_rate, + name="\\ApplicationInsights\\Dependency Calls/Sec", + description="Outgoing Requests per second", + unit="rps", + value_type=int, + ) + + def _track_dependency_rate(self, observer) -> None: + """ Track Dependency rate + + Calculated by obtaining the number of outgoing requests made + using the requests library within an elapsed time and dividing + that value over the elapsed time. + """ + current_count = dependency_map.get("count", 0) + current_time = time.time() + last_count = dependency_map.get("last_count", 0) + last_time = dependency_map.get("last_time") + last_result = dependency_map.get("last_result", 0) + + try: + # last_time is None the very first time this function is called + if last_time is not None: + elapsed_seconds = current_time - last_time + interval_count = current_count - last_count + result = interval_count / elapsed_seconds + else: + result = 0 + dependency_map["last_time"] = current_time + dependency_map["last_count"] = current_count + dependency_map["last_result"] = result + observer.observe(int(result), self._label_set) + except ZeroDivisionError: + # If elapsed_seconds is 0, exporter call made too close to previous + # Return the previous result if this is the case + observer.observe(int(last_result), self._label_set) diff --git a/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py new file mode 100644 index 0000000..0a66fce --- /dev/null +++ b/azure_monitor/src/azure_monitor/sdk/auto_collection/performance_metrics.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. 
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Auto-collection of machine- and process-level performance metrics."""
import logging

import psutil
from opentelemetry.metrics import Meter
from opentelemetry.sdk.metrics import LabelSet

logger = logging.getLogger(__name__)
# Handle on the current process, used for process CPU/memory readings.
PROCESS = psutil.Process()


class PerformanceMetrics:
    """Starts auto collection of performance metrics, including
    "Processor time as a percentage", "Amount of available memory
    in bytes", "Process CPU usage as a percentage" and "Amount of
    memory process has used in bytes" metrics.

    Args:
        meter: OpenTelemetry Meter
        label_set: OpenTelemetry label set
    """

    def __init__(self, meter: Meter, label_set: LabelSet):
        self._meter = meter
        self._label_set = label_set
        # (callback, name, description, unit, value_type) for each
        # performance observer. Registration order is kept stable for
        # callers/tests that inspect the meter.
        observer_specs = (
            (
                self._track_cpu,
                "\\Processor(_Total)\\% Processor Time",
                "Processor time as a percentage",
                "percentage",
                float,
            ),
            (
                self._track_memory,
                "\\Memory\\Available Bytes",
                "Amount of available memory in bytes",
                "byte",
                int,
            ),
            (
                self._track_process_cpu,
                "\\Process(??APP_WIN32_PROC??)\\% Processor Time",
                "Process CPU usage as a percentage",
                "percentage",
                float,
            ),
            (
                self._track_process_memory,
                "\\Process(??APP_WIN32_PROC??)\\Private Bytes",
                "Amount of memory process has used in bytes",
                "byte",
                int,
            ),
        )
        for callback, name, description, unit, value_type in observer_specs:
            meter.register_observer(
                callback=callback,
                name=name,
                description=description,
                unit=unit,
                value_type=value_type,
            )

    def _track_cpu(self, observer) -> None:
        """ Track CPU time

        Processor time is a float representing current system-wide CPU
        utilization minus idle CPU time, as a percentage. Idle CPU time
        is time spent doing nothing. Values range from 0.0 to 100.0
        inclusive.
        """
        idle_percentage = psutil.cpu_times_percent().idle
        observer.observe(100.0 - idle_percentage, self._label_set)

    def _track_memory(self, observer) -> None:
        """ Track Memory

        Available memory is memory that can be given instantly to
        processes without the system going into swap.
        """
        observer.observe(psutil.virtual_memory().available, self._label_set)

    def _track_process_cpu(self, observer) -> None:
        """ Track Process CPU time

        Reports CPU usage of the current process as a percentage,
        normalized by the number of logical CPUs so a process running
        threads on several cores still stays within 0.0 to 100.0.
        """
        try:
            logical_cpus = psutil.cpu_count(logical=True)
            observer.observe(
                PROCESS.cpu_percent() / logical_cpus, self._label_set
            )
        except Exception:  # pylint: disable=broad-except
            logger.exception("Error handling get process cpu usage.")

    def _track_process_memory(self, observer) -> None:
        """ Track Process Memory

        Reports the resident set size (non-swapped physical memory) used
        by the current process, in bytes.
        """
        try:
            observer.observe(PROCESS.memory_info().rss, self._label_set)
        except Exception:  # pylint: disable=broad-except
            logger.exception("Error handling get process private bytes.")
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Auto-collection of incoming-request metrics for http.server servers."""
import logging
import threading
import time
from http.server import HTTPServer

from opentelemetry.metrics import LabelSet, Meter

logger = logging.getLogger(__name__)

_requests_lock = threading.Lock()
# Shared state between patched handlers and observer callbacks:
# "count", "duration", plus "last_*" snapshots from prior observations.
requests_map = dict()
# Unpatched constructor, still used to actually build the server.
ORIGINAL_CONSTRUCTOR = HTTPServer.__init__


def request_patch(func):
    """Wrap an ``http.server`` ``do_*`` handler so its call count and
    wall-clock duration are accumulated into ``requests_map``."""

    def wrapper(self=None):
        started = time.time()
        func(self)
        finished = time.time()

        # Serialize updates from concurrent handler threads.
        with _requests_lock:
            requests_map["count"] = requests_map.get("count", 0) + 1
            requests_map["duration"] = requests_map.get("duration", 0) + (
                finished - started
            )

    return wrapper


def server_patch(*args, **kwargs):
    """Replacement for ``HTTPServer.__init__`` that instruments the
    handler class (third positional argument) before delegating to the
    original constructor."""
    if len(args) >= 3:
        handler = args[2]
        if handler:
            # Patch whichever HTTP-verb methods the handler defines.
            for method_name in (
                "do_DELETE",
                "do_GET",
                "do_HEAD",
                "do_OPTIONS",
                "do_POST",
                "do_PUT",
            ):
                if method_name in dir(handler):
                    setattr(
                        handler,
                        method_name,
                        request_patch(getattr(handler, method_name)),
                    )
    return ORIGINAL_CONSTRUCTOR(*args, **kwargs)


class RequestMetrics:
    """Starts auto collection of request metrics, including
    "Incoming Requests Average Execution Time" and
    "Incoming Requests Average Execution Rate" metrics.

    Args:
        meter: OpenTelemetry Meter
        label_set: OpenTelemetry label set
    """

    def __init__(self, meter: Meter, label_set: LabelSet):
        self._meter = meter
        self._label_set = label_set
        # Patch the HTTPServer constructor to track request information.
        HTTPServer.__init__ = server_patch

        meter.register_observer(
            callback=self._track_request_duration,
            name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time",
            description="Incoming Requests Average Execution Time",
            unit="milliseconds",
            value_type=int,
        )

        meter.register_observer(
            callback=self._track_request_rate,
            name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec",
            description="Incoming Requests Average Execution Rate",
            unit="rps",
            value_type=int,
        )

    def _track_request_duration(self, observer) -> None:
        """Observer callback: average request execution time (ms).

        Divides the handler time accumulated since the previous
        observation by the number of requests in the same window.
        """
        previous_average = requests_map.get("last_average_duration", 0)
        window_duration = requests_map.get("duration", 0) - requests_map.get(
            "last_duration", 0
        )
        window_count = requests_map.get("count", 0) - requests_map.get(
            "last_count", 0
        )
        try:
            average = window_duration / window_count
            requests_map["last_average_duration"] = average
            requests_map["last_duration"] = requests_map.get("duration", 0)
            # Report in milliseconds.
            observer.observe(int(average * 1000.0), self._label_set)
        except ZeroDivisionError:
            # No requests since the last observation; re-report the
            # previous average instead.
            observer.observe(
                int(previous_average * 1000.0), self._label_set
            )

    def _track_request_rate(self, observer) -> None:
        """Observer callback: incoming requests per second.

        Divides the number of requests received since the previous
        observation by the time elapsed since that observation.
        """
        now = time.time()
        previous_rate = requests_map.get("last_rate", 0)
        previous_time = requests_map.get("last_time")

        try:
            if previous_time is None:
                # Very first observation: no interval to divide over yet.
                rate = 0
            else:
                elapsed = now - requests_map.get("last_time", 0)
                window_count = requests_map.get(
                    "count", 0
                ) - requests_map.get("last_count", 0)
                rate = window_count / elapsed
            requests_map["last_time"] = now
            requests_map["last_count"] = requests_map.get("count", 0)
            requests_map["last_rate"] = rate
            observer.observe(int(rate), self._label_set)
        except ZeroDivisionError:
            # Observation made too close to the previous one; re-report
            # the previous rate instead.
            observer.observe(int(previous_rate), self._label_set)
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Local-disk persistence for telemetry awaiting (re)transmission."""

import datetime
import json
import os
import random

from azure_monitor.utils import PeriodicTask


def _fmt(timestamp):
    # Filesystem-safe, lexically sortable timestamp representation.
    return timestamp.strftime("%Y-%m-%dT%H%M%S.%f")


def _now():
    return datetime.datetime.utcnow()


def _seconds(seconds):
    return datetime.timedelta(seconds=seconds)


# pylint: disable=broad-except
class LocalFileBlob:
    """One batch of telemetry stored as a single JSON-lines file."""

    def __init__(self, fullpath):
        self.fullpath = fullpath

    def delete(self, silent=False):
        """Remove the blob file; with ``silent`` swallow any error."""
        try:
            os.remove(self.fullpath)
        except Exception:
            if not silent:
                raise

    def get(self, silent=False):
        """Return the stored items as a tuple of decoded JSON values."""
        try:
            with open(self.fullpath, "r") as blob_file:
                return tuple(json.loads(raw.strip()) for raw in blob_file)
        except Exception:
            if not silent:
                raise

    def put(self, data, lease_period=0, silent=False):
        """Write ``data`` (iterable of JSON-serializable items) one JSON
        document per line into a ``.tmp`` file, then atomically rename it
        into place. A non-zero ``lease_period`` (seconds) marks the blob
        as leased until that deadline via a ``.lock`` suffix.
        """
        try:
            staging_path = self.fullpath + ".tmp"
            with open(staging_path, "w") as blob_file:
                for item in data:
                    # Per the Python docs, '\n' (never os.linesep) is the
                    # correct terminator for files opened in text mode.
                    blob_file.write(json.dumps(item) + "\n")
            if lease_period:
                lease_deadline = _now() + _seconds(lease_period)
                self.fullpath += "@{}.lock".format(_fmt(lease_deadline))
            os.rename(staging_path, self.fullpath)
            return self
        except Exception:
            if not silent:
                raise

    def lease(self, period):
        """Acquire or extend a lease for ``period`` seconds.

        Returns ``self`` on success, or ``None`` when the rename race
        was lost to another consumer.
        """
        deadline = _now() + _seconds(period)
        target = self.fullpath
        if target.endswith(".lock"):
            target = target[: target.rindex("@")]
        target += "@{}.lock".format(_fmt(deadline))
        try:
            os.rename(self.fullpath, target)
        except Exception:
            return None
        self.fullpath = target
        return self


# pylint: disable=broad-except
class LocalFileStorage:
    """Directory of ``LocalFileBlob`` files with a periodic maintenance
    thread that expires leases and removes aged-out or abandoned files."""

    def __init__(
        self,
        path,
        max_size=100 * 1024 * 1024,  # 100MB
        maintenance_period=60,  # 1 minute
        retention_period=7 * 24 * 60 * 60,  # 7 days
        write_timeout=60,  # 1 minute
    ):
        self.path = os.path.abspath(path)
        self.max_size = max_size
        self.maintenance_period = maintenance_period
        self.retention_period = retention_period
        self.write_timeout = write_timeout
        # Run once loudly so configuration errors surface immediately.
        self._maintenance_routine(silent=False)
        self._maintenance_task = PeriodicTask(
            interval=self.maintenance_period,
            function=self._maintenance_routine,
            kwargs={"silent": True},
        )
        self._maintenance_task.daemon = True
        self._maintenance_task.start()

    def close(self):
        """Stop the maintenance thread and wait for it to finish."""
        self._maintenance_task.cancel()
        self._maintenance_task.join()

    def __enter__(self):
        return self

    # pylint: disable=redefined-builtin
    def __exit__(self, type, value, traceback):
        self.close()

    def _maintenance_routine(self, silent=False):
        """Ensure the directory exists, then sweep it once — ``gets()``
        performs lease expiry and retention cleanup as a side effect."""
        try:
            if not os.path.isdir(self.path):
                os.makedirs(self.path)
        except Exception:
            if not silent:
                raise
        try:
            for _ in self.gets():
                pass
        except Exception:
            if not silent:
                raise

    def gets(self):
        """Yield every readable blob, oldest first, deleting timed-out
        ``.tmp`` files and aged-out blobs and releasing expired leases
        along the way."""
        now = _now()
        lease_deadline = _fmt(now)
        retention_deadline = _fmt(now - _seconds(self.retention_period))
        timeout_deadline = _fmt(now - _seconds(self.write_timeout))
        for name in sorted(os.listdir(self.path)):
            path = os.path.join(self.path, name)
            if not os.path.isfile(path):
                continue  # skip if not a file
            if path.endswith(".tmp") and name < timeout_deadline:
                # Abandoned write; drop it.
                try:
                    os.remove(path)  # TODO: log data loss
                except Exception:
                    pass  # keep silent
            if path.endswith(".lock"):
                if path[path.rindex("@") + 1 : -5] > lease_deadline:
                    continue  # under lease
                unlocked = path[: path.rindex("@")]
                try:
                    os.rename(path, unlocked)
                except Exception:
                    continue  # keep silent
                path = unlocked
            if path.endswith(".blob"):
                if name < retention_deadline:
                    try:
                        os.remove(path)  # TODO: log data loss
                    except Exception:
                        pass  # keep silent
                else:
                    yield LocalFileBlob(path)

    def get(self):
        """Return the oldest available blob, or ``None`` when empty."""
        return next(self.gets(), None)

    def put(self, data, lease_period=0, silent=False):
        """Persist ``data`` as a new uniquely-named blob file."""
        blob_name = "{}-{}.blob".format(
            _fmt(_now()),
            "{:08x}".format(random.getrandbits(32)),  # thread-safe random
        )
        return LocalFileBlob(os.path.join(self.path, blob_name)).put(
            data, lease_period=lease_period, silent=silent
        )
-import json -import logging -from urllib.parse import urlparse - -import requests - -from azure_monitor import protocol, util -from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult -from opentelemetry.sdk.util import ns_to_iso_str -from opentelemetry.trace import Span, SpanKind - -logger = logging.getLogger(__name__) - - -class AzureMonitorSpanExporter(SpanExporter): - def __init__(self, **options): - self.options = util.Options(**options) - if not self.options.instrumentation_key: - raise ValueError("The instrumentation_key is not provided.") - - def export(self, spans): - envelopes = tuple(map(self.span_to_envelope, spans)) - - try: - response = requests.post( - url=self.options.endpoint, - data=json.dumps(envelopes), - headers={ - "Accept": "application/json", - "Content-Type": "application/json; charset=utf-8", - }, - timeout=self.options.timeout, - ) - except requests.RequestException as ex: - logger.warning("Transient client side error %s.", ex) - return SpanExportResult.FAILED_RETRYABLE - - text = "N/A" - data = None # noqa pylint: disable=unused-variable - try: - text = response.text - except Exception as ex: # noqa pylint: disable=broad-except - logger.warning("Error while reading response body %s.", ex) - else: - try: - data = json.loads(text) # noqa pylint: disable=unused-variable - except Exception: # noqa pylint: disable=broad-except - pass - - if response.status_code == 200: - logger.info("Transmission succeeded: %s.", text) - return SpanExportResult.SUCCESS - - if response.status_code in ( - 206, # Partial Content - 429, # Too Many Requests - 500, # Internal Server Error - 503, # Service Unavailable - ): - return SpanExportResult.FAILED_RETRYABLE - - return SpanExportResult.FAILED_NOT_RETRYABLE - - @staticmethod - def ns_to_duration(nanoseconds): - value = (nanoseconds + 500000) // 1000000 # duration in milliseconds - value, microseconds = divmod(value, 1000) - value, seconds = divmod(value, 60) - value, minutes = divmod(value, 
60) - days, hours = divmod(value, 24) - return "{:d}.{:02d}:{:02d}:{:02d}.{:03d}".format( - days, hours, minutes, seconds, microseconds - ) - - def span_to_envelope(self, span): # noqa pylint: disable=too-many-branches - envelope = protocol.Envelope( - iKey=self.options.instrumentation_key, - tags=dict(util.azure_monitor_context), - time=ns_to_iso_str(span.start_time), - ) - envelope.tags["ai.operation.id"] = "{:032x}".format( - span.context.trace_id - ) - parent = span.parent - if isinstance(parent, Span): - parent = parent.context - if parent: - envelope.tags[ - "ai.operation.parentId" - ] = "|{:032x}.{:016x}.".format(parent.trace_id, parent.span_id) - if span.kind in (SpanKind.CONSUMER, SpanKind.SERVER): - envelope.name = "Microsoft.ApplicationInsights.Request" - data = protocol.Request( - id="|{:032x}.{:016x}.".format( - span.context.trace_id, span.context.span_id - ), - duration=self.ns_to_duration(span.end_time - span.start_time), - responseCode="0", - success=False, - properties={}, - ) - envelope.data = protocol.Data( - baseData=data, baseType="RequestData" - ) - if "http.method" in span.attributes: - data.name = span.attributes["http.method"] - if "http.route" in span.attributes: - data.name = data.name + " " + span.attributes["http.route"] - envelope.tags["ai.operation.name"] = data.name - if "http.url" in span.attributes: - data.url = span.attributes["http.url"] - if "http.status_code" in span.attributes: - status_code = span.attributes["http.status_code"] - data.responseCode = str(status_code) - data.success = 200 <= status_code < 400 - else: - envelope.name = "Microsoft.ApplicationInsights.RemoteDependency" - data = protocol.RemoteDependency( - name=span.name, - id="|{:032x}.{:016x}.".format( - span.context.trace_id, span.context.span_id - ), - resultCode="0", # TODO - duration=self.ns_to_duration(span.end_time - span.start_time), - success=True, # TODO - properties={}, - ) - envelope.data = protocol.Data( - baseData=data, 
baseType="RemoteDependencyData" - ) - if span.kind in (SpanKind.CLIENT, SpanKind.PRODUCER): - data.type = "HTTP" # TODO - if "http.url" in span.attributes: - url = span.attributes["http.url"] - # TODO: error handling, probably put scheme as well - data.name = urlparse(url).netloc - if "http.status_code" in span.attributes: - data.resultCode = str(span.attributes["http.status_code"]) - else: # SpanKind.INTERNAL - data.type = "InProc" - for key in span.attributes: - data.properties[key] = span.attributes[key] - if span.links: - links = [] - for link in span.links: - links.append( - { - "operation_Id": "{:032x}".format( - link.context.trace_id - ), - "id": "|{:032x}.{:016x}.".format( - link.context.trace_id, link.context.span_id - ), - } - ) - data.properties["_MS.links"] = json.dumps(links) - print(data.properties["_MS.links"]) - # TODO: tracestate, tags - return envelope diff --git a/azure_monitor/src/azure_monitor/util.py b/azure_monitor/src/azure_monitor/util.py deleted file mode 100644 index f885ba9..0000000 --- a/azure_monitor/src/azure_monitor/util.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
-import locale -import os -import platform -import sys - -from azure_monitor.protocol import BaseObject -from azure_monitor.version import __version__ as ext_version -from opentelemetry.sdk.version import __version__ as opentelemetry_version - -azure_monitor_context = { - "ai.cloud.role": os.path.basename(sys.argv[0]) or "Python Application", - "ai.cloud.roleInstance": platform.node(), - "ai.device.id": platform.node(), - "ai.device.locale": locale.getdefaultlocale()[0], - "ai.device.osVersion": platform.version(), - "ai.device.type": "Other", - "ai.internal.sdkVersion": "py{}:ot{}:ext{}".format( - platform.python_version(), opentelemetry_version, ext_version - ), -} - - -class Options(BaseObject): - _default = BaseObject( - endpoint="https://dc.services.visualstudio.com/v2/track", - instrumentation_key=os.getenv("APPINSIGHTS_INSTRUMENTATIONKEY", None), - timeout=10.0, # networking timeout in seconds - ) diff --git a/azure_monitor/src/azure_monitor/utils.py b/azure_monitor/src/azure_monitor/utils.py new file mode 100644 index 0000000..ad7eb93 --- /dev/null +++ b/azure_monitor/src/azure_monitor/utils.py @@ -0,0 +1,71 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import locale
import os
import platform
import sys
import threading
import time

from opentelemetry.sdk.version import __version__ as opentelemetry_version

from azure_monitor.version import __version__ as ext_version

# Static Application Insights context tags sent with every envelope.
azure_monitor_context = {
    "ai.cloud.role": os.path.basename(sys.argv[0]) or "Python Application",
    "ai.cloud.roleInstance": platform.node(),
    "ai.device.id": platform.node(),
    "ai.device.locale": locale.getdefaultlocale()[0],
    "ai.device.osVersion": platform.version(),
    "ai.device.type": "Other",
    "ai.internal.sdkVersion": "py{}:ot{}:ext{}".format(
        platform.python_version(), opentelemetry_version, ext_version
    ),
}


def ns_to_duration(nanoseconds):
    """Convert a duration in nanoseconds to the Application Insights
    ``d.hh:mm:ss.fff`` string format, rounded to the nearest millisecond.

    :param nanoseconds: Duration in nanoseconds.
    :return: Formatted duration string, e.g. ``"0.00:00:01.500"``.
    """
    value = (nanoseconds + 500000) // 1000000  # duration in milliseconds
    # FIX: the remainder of divmod(ms, 1000) is milliseconds; the previous
    # local name "microseconds" was misleading (behavior unchanged).
    value, milliseconds = divmod(value, 1000)
    value, seconds = divmod(value, 60)
    value, minutes = divmod(value, 60)
    days, hours = divmod(value, 24)
    return "{:d}.{:02d}:{:02d}:{:02d}.{:03d}".format(
        days, hours, minutes, seconds, milliseconds
    )


class PeriodicTask(threading.Thread):
    """Thread that periodically calls a given function.

    :type interval: int or float
    :param interval: Seconds between calls to the function.

    :type function: function
    :param function: The function to call.

    :type args: list
    :param args: The args passed in while calling `function`.

    :type kwargs: dict
    :param kwargs: The kwargs passed in while calling `function`.
    """

    def __init__(self, interval, function, args=None, kwargs=None):
        super(PeriodicTask, self).__init__()
        self.interval = interval
        self.function = function
        self.args = args or []
        self.kwargs = kwargs or {}
        # Set by cancel(); also doubles as the inter-call sleep via wait().
        self.finished = threading.Event()

    def run(self):
        """Call ``function`` every ``interval`` seconds until cancelled,
        compensating the wait for the time the call itself took."""
        wait_time = self.interval
        while not self.finished.wait(wait_time):
            start_time = time.time()
            self.function(*self.args, **self.kwargs)
            elapsed_time = time.time() - start_time
            wait_time = max(self.interval - elapsed_time, 0)

    def cancel(self):
        """Stop the periodic task; no further calls will be made."""
        self.finished.set()
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import unittest
from unittest import mock

from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider

from azure_monitor.sdk.auto_collection import AutoCollection


# pylint: disable=protected-access
class TestAutoCollection(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        metrics.set_meter_provider(MeterProvider())
        cls._meter = metrics.get_meter(__name__)
        kvp = {"environment": "staging"}
        cls._test_label_set = cls._meter.get_label_set(kvp)

    @classmethod
    def tearDownClass(cls):
        # Reset the global meter provider so other test modules start clean.
        metrics._METER_PROVIDER = None

    @mock.patch(
        "azure_monitor.sdk.auto_collection.PerformanceMetrics", autospec=True
    )
    @mock.patch(
        "azure_monitor.sdk.auto_collection.DependencyMetrics", autospec=True
    )
    @mock.patch(
        "azure_monitor.sdk.auto_collection.RequestMetrics", autospec=True
    )
    def test_constructor(
        self, mock_requests, mock_dependencies, mock_performance
    ):
        """AutoCollection starts all three collectors with the meter and
        label set it was given.

        FIX: ``mock.patch`` decorators are applied bottom-up, so the
        innermost decorator (RequestMetrics) supplies the FIRST mock
        argument. The previous parameter order mislabeled the mocks
        (``mock_performance`` actually received the RequestMetrics mock);
        the test only passed because the assertions were symmetric.
        """
        AutoCollection(meter=self._meter, label_set=self._test_label_set)
        for collector_mock in (
            mock_performance,
            mock_dependencies,
            mock_requests,
        ):
            self.assertEqual(collector_mock.called, True)
            self.assertEqual(collector_mock.call_args[0][0], self._meter)
            self.assertEqual(
                collector_mock.call_args[0][1], self._test_label_set
            )
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import unittest
from unittest import mock

import requests
from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider, Observer

from azure_monitor.sdk.auto_collection import dependency_metrics

# Unpatched implementation, restored around every test so the patch
# installed by DependencyMetrics cannot leak between tests.
ORIGINAL_FUNCTION = requests.Session.request


# pylint: disable=protected-access
class TestDependencyMetrics(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        metrics.set_meter_provider(MeterProvider())
        cls._meter = metrics.get_meter(__name__)
        kvp = {"environment": "staging"}
        cls._test_label_set = cls._meter.get_label_set(kvp)

    @classmethod
    def tearDownClass(cls):
        # Reset the global meter provider so other test modules start clean.
        metrics._METER_PROVIDER = None

    def setUp(self):
        # Start every test with an empty map and an unpatched requests.
        dependency_metrics.dependency_map.clear()
        requests.Session.request = ORIGINAL_FUNCTION

    def tearDown(self):
        # FIX: tearDown was previously a @classmethod (it runs per-test on
        # the instance) and reset a non-existent
        # dependency_metrics.ORIGINAL_CONSTRUCTOR attribute that was
        # copy-pasted from the request-metrics tests along with an unused
        # HTTPServer import; restoring the requests patch is all that is
        # needed here.
        requests.Session.request = ORIGINAL_FUNCTION

    def _make_observer(self, metrics_collector):
        """Build an Observer bound to the collector's rate callback."""
        return Observer(
            callback=metrics_collector._track_dependency_rate,
            name="\\ApplicationInsights\\Dependency Calls/Sec",
            description="Outgoing Requests per second",
            unit="rps",
            value_type=int,
            meter=self._meter,
        )

    def test_constructor(self):
        mock_meter = mock.Mock()
        metrics_collector = dependency_metrics.DependencyMetrics(
            meter=mock_meter, label_set=self._test_label_set
        )
        self.assertEqual(metrics_collector._meter, mock_meter)
        self.assertEqual(metrics_collector._label_set, self._test_label_set)
        self.assertEqual(mock_meter.register_observer.call_count, 1)
        mock_meter.register_observer.assert_called_with(
            callback=metrics_collector._track_dependency_rate,
            name="\\ApplicationInsights\\Dependency Calls/Sec",
            description="Outgoing Requests per second",
            unit="rps",
            value_type=int,
        )

    @mock.patch("azure_monitor.sdk.auto_collection.dependency_metrics.time")
    def test_track_dependency_rate(self, time_mock):
        time_mock.time.return_value = 100
        metrics_collector = dependency_metrics.DependencyMetrics(
            meter=self._meter, label_set=self._test_label_set
        )
        obs = self._make_observer(metrics_collector)
        # 4 requests over a 2 second window -> 2 rps.
        dependency_metrics.dependency_map["last_time"] = 98
        dependency_metrics.dependency_map["count"] = 4
        metrics_collector._track_dependency_rate(obs)
        self.assertEqual(obs.aggregators[self._test_label_set].current, 2)

    @mock.patch("azure_monitor.sdk.auto_collection.dependency_metrics.time")
    def test_track_dependency_rate_time_none(self, time_mock):
        time_mock.time.return_value = 100
        metrics_collector = dependency_metrics.DependencyMetrics(
            meter=self._meter, label_set=self._test_label_set
        )
        # First observation ever: no interval yet, rate must be 0.
        dependency_metrics.dependency_map["last_time"] = None
        obs = self._make_observer(metrics_collector)
        metrics_collector._track_dependency_rate(obs)
        self.assertEqual(obs.aggregators[self._test_label_set].current, 0)

    @mock.patch("azure_monitor.sdk.auto_collection.dependency_metrics.time")
    def test_track_dependency_rate_error(self, time_mock):
        time_mock.time.return_value = 100
        metrics_collector = dependency_metrics.DependencyMetrics(
            meter=self._meter, label_set=self._test_label_set
        )
        # Zero elapsed time: previous result must be re-reported.
        dependency_metrics.dependency_map["last_time"] = 100
        dependency_metrics.dependency_map["last_result"] = 5
        obs = self._make_observer(metrics_collector)
        metrics_collector._track_dependency_rate(obs)
        self.assertEqual(obs.aggregators[self._test_label_set].current, 5)

    @mock.patch(
        "azure_monitor.sdk.auto_collection.dependency_metrics.ORIGINAL_REQUEST"
    )
    def test_dependency_patch(self, request_mock):
        session = requests.Session()
        dependency_metrics.dependency_patch(session)
        self.assertEqual(dependency_metrics.dependency_map["count"], 1)
        request_mock.assert_called_with(session)

    @mock.patch(
        "azure_monitor.sdk.auto_collection.dependency_metrics.ORIGINAL_REQUEST"
    )
    @mock.patch("azure_monitor.sdk.auto_collection.dependency_metrics.context")
    def test_dependency_patch_suppress(self, context_mock, request_mock):
        # With suppress_instrumentation set, the call passes through but
        # is not counted.
        context_mock.get_value.return_value = {}
        session = requests.Session()
        dependency_metrics.dependency_patch(session)
        self.assertEqual(dependency_metrics.dependency_map.get("count"), None)
        request_mock.assert_called_with(session)
+ +import collections +import unittest +from unittest import mock + +from opentelemetry import metrics +from opentelemetry.sdk.metrics import MeterProvider, Observer + +from azure_monitor.sdk.auto_collection import PerformanceMetrics + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + + return func + + +# pylint: disable=protected-access +class TestPerformanceMetrics(unittest.TestCase): + @classmethod + def setUpClass(cls): + metrics.set_meter_provider(MeterProvider()) + cls._meter = metrics.get_meter(__name__) + kvp = {"environment": "staging"} + cls._test_label_set = cls._meter.get_label_set(kvp) + + @classmethod + def tearDownClass(cls): + metrics._METER_PROVIDER = None + + def test_constructor(self): + mock_meter = mock.Mock() + performance_metrics_collector = PerformanceMetrics( + meter=mock_meter, label_set=self._test_label_set + ) + self.assertEqual(performance_metrics_collector._meter, mock_meter) + self.assertEqual( + performance_metrics_collector._label_set, self._test_label_set + ) + self.assertEqual(mock_meter.register_observer.call_count, 4) + reg_obs_calls = mock_meter.register_observer.call_args_list + reg_obs_calls[0].assert_called_with( + callback=performance_metrics_collector._track_cpu, + name="\\Processor(_Total)\\% Processor Time", + description="Processor time as a percentage", + unit="percentage", + value_type=float, + ) + reg_obs_calls[1].assert_called_with( + callback=performance_metrics_collector._track_memory, + name="\\Memory\\Available Bytes", + description="Amount of available memory in bytes", + unit="byte", + value_type=int, + ) + reg_obs_calls[2].assert_called_with( + callback=performance_metrics_collector._track_process_cpu, + name="\\Process(??APP_WIN32_PROC??)\\% Processor Time", + description="Process CPU usage as a percentage", + unit="percentage", + value_type=float, + ) + reg_obs_calls[3].assert_called_with( + 
callback=performance_metrics_collector._track_process_memory, + name="\\Process(??APP_WIN32_PROC??)\\Private Bytes", + description="Amount of memory process has used in bytes", + unit="byte", + value_type=int, + ) + + def test_track_cpu(self): + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + with mock.patch("psutil.cpu_times_percent") as processor_mock: + cpu = collections.namedtuple("cpu", "idle") + cpu_times = cpu(idle=94.5) + processor_mock.return_value = cpu_times + obs = Observer( + callback=performance_metrics_collector._track_cpu, + name="\\Processor(_Total)\\% Processor Time", + description="Processor time as a percentage", + unit="percentage", + value_type=float, + meter=self._meter, + ) + performance_metrics_collector._track_cpu(obs) + self.assertEqual( + obs.aggregators[self._test_label_set].current, 5.5 + ) + + @mock.patch("psutil.virtual_memory") + def test_track_memory(self, psutil_mock): + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + memory = collections.namedtuple("memory", "available") + vmem = memory(available=100) + psutil_mock.return_value = vmem + obs = Observer( + callback=performance_metrics_collector._track_memory, + name="\\Memory\\Available Bytes", + description="Amount of available memory in bytes", + unit="byte", + value_type=int, + meter=self._meter, + ) + performance_metrics_collector._track_memory(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 100) + + @mock.patch("azure_monitor.sdk.auto_collection.performance_metrics.psutil") + def test_track_process_cpu(self, psutil_mock): + with mock.patch( + "azure_monitor.sdk.auto_collection.performance_metrics.PROCESS" + ) as process_mock: + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + process_mock.cpu_percent.return_value = 44.4 + psutil_mock.cpu_count.return_value = 2 + obs = 
Observer( + callback=performance_metrics_collector._track_process_cpu, + name="\\Process(??APP_WIN32_PROC??)\\% Processor Time", + description="Process CPU usage as a percentage", + unit="percentage", + value_type=float, + meter=self._meter, + ) + performance_metrics_collector._track_process_cpu(obs) + self.assertEqual( + obs.aggregators[self._test_label_set].current, 22.2 + ) + + @mock.patch("azure_monitor.sdk.auto_collection.performance_metrics.logger") + def test_track_process_cpu_exception(self, logger_mock): + with mock.patch( + "azure_monitor.sdk.auto_collection.performance_metrics.psutil" + ) as psutil_mock: + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + psutil_mock.cpu_count.return_value = None + obs = Observer( + callback=performance_metrics_collector._track_process_cpu, + name="\\Process(??APP_WIN32_PROC??)\\% Processor Time", + description="Process CPU usage as a percentage", + unit="percentage", + value_type=float, + meter=self._meter, + ) + performance_metrics_collector._track_process_cpu(obs) + self.assertEqual(logger_mock.exception.called, True) + + def test_track_process_memory(self): + with mock.patch( + "azure_monitor.sdk.auto_collection.performance_metrics.PROCESS" + ) as process_mock: + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + memory = collections.namedtuple("memory", "rss") + pmem = memory(rss=100) + process_mock.memory_info.return_value = pmem + obs = Observer( + callback=performance_metrics_collector._track_process_memory, + name="\\Process(??APP_WIN32_PROC??)\\Private Bytes", + description="Amount of memory process has used in bytes", + unit="byte", + value_type=int, + meter=self._meter, + ) + performance_metrics_collector._track_process_memory(obs) + self.assertEqual( + obs.aggregators[self._test_label_set].current, 100 + ) + + @mock.patch("azure_monitor.sdk.auto_collection.performance_metrics.logger") + def 
test_track_process_memory_exception(self, logger_mock): + with mock.patch( + "azure_monitor.sdk.auto_collection.performance_metrics.PROCESS", + throw(Exception), + ): + performance_metrics_collector = PerformanceMetrics( + meter=self._meter, label_set=self._test_label_set + ) + obs = Observer( + callback=performance_metrics_collector._track_process_memory, + name="\\Process(??APP_WIN32_PROC??)\\Private Bytes", + description="Amount of memory process has used in bytes", + unit="byte", + value_type=int, + meter=self._meter, + ) + performance_metrics_collector._track_process_memory(obs) + self.assertEqual(logger_mock.exception.called, True) diff --git a/azure_monitor/tests/auto_collection/test_request_metrics.py b/azure_monitor/tests/auto_collection/test_request_metrics.py new file mode 100644 index 0000000..63030d1 --- /dev/null +++ b/azure_monitor/tests/auto_collection/test_request_metrics.py @@ -0,0 +1,217 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ +import unittest +from http.server import HTTPServer +from unittest import mock + +import requests +from opentelemetry import metrics +from opentelemetry.sdk.metrics import MeterProvider, Observer + +from azure_monitor.sdk.auto_collection import request_metrics + +ORIGINAL_FUNCTION = requests.Session.request +ORIGINAL_CONS = HTTPServer.__init__ + + +# pylint: disable=protected-access +class TestRequestMetrics(unittest.TestCase): + @classmethod + def setUpClass(cls): + metrics.set_meter_provider(MeterProvider()) + cls._meter = metrics.get_meter(__name__) + kvp = {"environment": "staging"} + cls._test_label_set = cls._meter.get_label_set(kvp) + + @classmethod + def tearDown(cls): + metrics._METER_PROVIDER = None + + def setUp(self): + request_metrics.requests_map.clear() + requests.Session.request = ORIGINAL_FUNCTION + request_metrics.ORIGINAL_CONSTRUCTOR = ORIGINAL_CONS + + def test_constructor(self): + mock_meter = mock.Mock() + request_metrics_collector = request_metrics.RequestMetrics( + meter=mock_meter, label_set=self._test_label_set + ) + self.assertEqual(request_metrics_collector._meter, mock_meter) + self.assertEqual( + request_metrics_collector._label_set, self._test_label_set + ) + + self.assertEqual(mock_meter.register_observer.call_count, 2) + + create_metric_calls = mock_meter.register_observer.call_args_list + + create_metric_calls[0].assert_called_with( + callback=request_metrics_collector._track_request_duration, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", + description="Incoming Requests Average Execution Time", + unit="milliseconds", + value_type=int, + ) + + create_metric_calls[1].assert_called_with( + callback=request_metrics_collector._track_request_rate, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + description="Incoming Requests Average Execution Rate", + unit="rps", + value_type=int, + ) + + def test_track_request_duration(self): + request_metrics_collector = 
request_metrics.RequestMetrics( + meter=self._meter, label_set=self._test_label_set + ) + request_metrics.requests_map["duration"] = 0.1 + request_metrics.requests_map["count"] = 10 + request_metrics.requests_map["last_count"] = 5 + obs = Observer( + callback=request_metrics_collector._track_request_duration, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", + description="Incoming Requests Average Execution Time", + unit="milliseconds", + value_type=int, + meter=self._meter, + ) + request_metrics_collector._track_request_duration(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 20) + + def test_track_request_duration_error(self): + request_metrics_collector = request_metrics.RequestMetrics( + meter=self._meter, label_set=self._test_label_set + ) + request_metrics.requests_map["duration"] = 0.1 + request_metrics.requests_map["count"] = 10 + request_metrics.requests_map["last_count"] = 10 + obs = Observer( + callback=request_metrics_collector._track_request_duration, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Request Execution Time", + description="Incoming Requests Average Execution Time", + unit="milliseconds", + value_type=int, + meter=self._meter, + ) + request_metrics_collector._track_request_duration(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 0) + + @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time") + def test_track_request_rate(self, time_mock): + request_metrics_collector = request_metrics.RequestMetrics( + meter=self._meter, label_set=self._test_label_set + ) + time_mock.time.return_value = 100 + request_metrics.requests_map["last_time"] = 98 + request_metrics.requests_map["count"] = 4 + obs = Observer( + callback=request_metrics_collector._track_request_rate, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + description="Incoming Requests Average Execution Rate", + unit="rps", + value_type=int, + meter=self._meter, + ) + 
request_metrics_collector._track_request_rate(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 2) + + @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time") + def test_track_request_rate_time_none(self, time_mock): + time_mock.time.return_value = 100 + request_metrics_collector = request_metrics.RequestMetrics( + meter=self._meter, label_set=self._test_label_set + ) + request_metrics.requests_map["last_time"] = None + obs = Observer( + callback=request_metrics_collector._track_request_rate, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + description="Incoming Requests Average Execution Rate", + unit="rps", + value_type=int, + meter=self._meter, + ) + request_metrics_collector._track_request_rate(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 0) + + @mock.patch("azure_monitor.sdk.auto_collection.request_metrics.time") + def test_track_request_rate_error(self, time_mock): + request_metrics_collector = request_metrics.RequestMetrics( + meter=self._meter, label_set=self._test_label_set + ) + time_mock.time.return_value = 100 + request_metrics.requests_map["last_rate"] = 5 + request_metrics.requests_map["last_time"] = 100 + obs = Observer( + callback=request_metrics_collector._track_request_rate, + name="\\ASP.NET Applications(??APP_W3SVC_PROC??)\\Requests/Sec", + description="Incoming Requests Average Execution Rate", + unit="rps", + value_type=int, + meter=self._meter, + ) + request_metrics_collector._track_request_rate(obs) + self.assertEqual(obs.aggregators[self._test_label_set].current, 5) + + def test_request_patch(self): + map = request_metrics.requests_map # pylint: disable=redefined-builtin + func = mock.Mock() + new_func = request_metrics.request_patch(func) + new_func() + + self.assertEqual(map["count"], 1) + self.assertIsNotNone(map["duration"]) + self.assertEqual(len(func.call_args_list), 1) + + def test_server_patch(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, 
z: None + with mock.patch( + "azure_monitor.sdk.auto_collection.request_metrics.request_patch" + ) as request_mock: + handler = mock.Mock() + handler.do_DELETE.return_value = None + handler.do_GET.return_value = None + handler.do_HEAD.return_value = None + handler.do_OPTIONS.return_value = None + handler.do_POST.return_value = None + handler.do_PUT.return_value = None + result = request_metrics.server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 6) + + def test_server_patch_no_methods(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + with mock.patch( + "azure_monitor.sdk.auto_collection.request_metrics.request_patch" + ) as request_mock: + handler = mock.Mock() + result = request_metrics.server_patch(None, None, handler) + handler.do_DELETE() + handler.do_GET() + handler.do_HEAD() + handler.do_OPTIONS() + handler.do_POST() + handler.do_PUT() + + self.assertEqual(result, None) + self.assertEqual(len(request_mock.call_args_list), 0) + + def test_server_patch_no_args(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y: None + req = request_metrics.server_patch(None, None) + + self.assertEqual(req, None) + + def test_server_patch_no_handler(self): + request_metrics.ORIGINAL_CONSTRUCTOR = lambda x, y, z: None + req = request_metrics.server_patch(None, None, None) + self.assertEqual(req, None) diff --git a/azure_monitor/tests/metrics/__init__.py b/azure_monitor/tests/metrics/__init__.py new file mode 100644 index 0000000..5b7f7a9 --- /dev/null +++ b/azure_monitor/tests/metrics/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
diff --git a/azure_monitor/tests/metrics/test_metrics.py b/azure_monitor/tests/metrics/test_metrics.py new file mode 100644 index 0000000..1984c3b --- /dev/null +++ b/azure_monitor/tests/metrics/test_metrics.py @@ -0,0 +1,315 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import os +import shutil +import unittest +from unittest import mock + +from opentelemetry import metrics +from opentelemetry.sdk.metrics import Counter, Measure, MeterProvider +from opentelemetry.sdk.metrics.export import MetricRecord, MetricsExportResult +from opentelemetry.sdk.metrics.export.aggregate import ( + CounterAggregator, + MinMaxSumCountAggregator, + ObserverAggregator, +) +from opentelemetry.sdk.util import ns_to_iso_str + +from azure_monitor.export import ExportResult +from azure_monitor.export.metrics import AzureMonitorMetricsExporter +from azure_monitor.options import ExporterOptions +from azure_monitor.protocol import Data, DataPoint, Envelope, MetricData + +TEST_FOLDER = os.path.abspath(".test.exporter.trace") +STORAGE_PATH = os.path.join(TEST_FOLDER) + + +# pylint: disable=invalid-name +def setUpModule(): + os.makedirs(TEST_FOLDER) + + +# pylint: disable=invalid-name +def tearDownModule(): + shutil.rmtree(TEST_FOLDER) + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + + return func + + +# pylint: disable=protected-access +class TestAzureMetricsExporter(unittest.TestCase): + @classmethod + def setUpClass(cls): + os.environ[ + "APPINSIGHTS_INSTRUMENTATIONKEY" + ] = "1234abcd-5678-4efa-8abc-1234567890ab" + cls._exporter = AzureMonitorMetricsExporter(storage_path=STORAGE_PATH) + + metrics.set_meter_provider(MeterProvider()) + cls._meter = metrics.get_meter(__name__) + cls._test_metric = cls._meter.create_metric( + "testname", "testdesc", "unit", int, Counter, ["environment"] + ) + cls._test_measure = cls._meter.create_metric( + "testname", "testdesc", "unit", int, 
Measure, ["environment"] + ) + cls._test_obs = cls._meter.register_observer( + lambda x: x, + "testname", + "testdesc", + "unit", + int, + Counter, + ["environment"], + ) + kvp = {"environment": "staging"} + cls._test_label_set = cls._meter.get_label_set(kvp) + + def setUp(self): + for filename in os.listdir(STORAGE_PATH): + file_path = os.path.join(STORAGE_PATH, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except OSError as e: + print("Failed to delete %s. Reason: %s" % (file_path, e)) + + @classmethod + def tearDownClass(cls): + metrics._METER_PROVIDER = None + + def test_constructor(self): + """Test the constructor.""" + exporter = AzureMonitorMetricsExporter( + instrumentation_key="4321abcd-5678-4efa-8abc-1234567890ab", + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + self.assertIsInstance(exporter.options, ExporterOptions) + self.assertEqual( + exporter.options.instrumentation_key, + "4321abcd-5678-4efa-8abc-1234567890ab", + ) + + def test_export(self,): + record = MetricRecord( + CounterAggregator(), self._test_label_set, self._test_metric + ) + exporter = self._exporter + with mock.patch( + "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit" + ) as transmit: # noqa: E501 + transmit.return_value = ExportResult.SUCCESS + result = exporter.export([record]) + self.assertEqual(result, MetricsExportResult.SUCCESS) + + def test_export_failed_retryable(self): + record = MetricRecord( + CounterAggregator(), self._test_label_set, self._test_metric + ) + exporter = self._exporter + with mock.patch( + "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit" + ) as transmit: # noqa: E501 + transmit.return_value = ExportResult.FAILED_RETRYABLE + storage_mock = mock.Mock() + exporter.storage.put = storage_mock + result = exporter.export([record]) + self.assertEqual(result, MetricsExportResult.FAILED_RETRYABLE) + 
self.assertEqual(storage_mock.call_count, 1) + + @mock.patch("azure_monitor.export.metrics.logger") + def test_export_exception(self, logger_mock): + record = MetricRecord( + CounterAggregator(), self._test_label_set, self._test_metric + ) + exporter = self._exporter + with mock.patch( + "azure_monitor.export.metrics.AzureMonitorMetricsExporter._transmit", + throw(Exception), + ): # noqa: E501 + result = exporter.export([record]) + self.assertEqual(result, MetricsExportResult.FAILED_NOT_RETRYABLE) + self.assertEqual(logger_mock.exception.called, True) + + def test_metric_to_envelope_none(self): + exporter = self._exporter + self.assertIsNone(exporter._metric_to_envelope(None)) + + def test_metric_to_envelope(self): + aggregator = CounterAggregator() + aggregator.update(123) + aggregator.take_checkpoint() + record = MetricRecord( + aggregator, self._test_label_set, self._test_metric + ) + exporter = self._exporter + envelope = exporter._metric_to_envelope(record) + self.assertIsInstance(envelope, Envelope) + self.assertEqual(envelope.ver, 1) + self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric") + self.assertEqual( + envelope.time, + ns_to_iso_str( + record.metric.bind(record.label_set).last_update_timestamp + ), + ) + self.assertEqual(envelope.sample_rate, None) + self.assertEqual(envelope.seq, None) + self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab") + self.assertEqual(envelope.flags, None) + + self.assertIsInstance(envelope.data, Data) + self.assertIsInstance(envelope.data.base_data, MetricData) + self.assertEqual(envelope.data.base_data.ver, 2) + self.assertEqual(len(envelope.data.base_data.metrics), 1) + self.assertIsInstance(envelope.data.base_data.metrics[0], DataPoint) + self.assertEqual(envelope.data.base_data.metrics[0].ns, "testdesc") + self.assertEqual(envelope.data.base_data.metrics[0].name, "testname") + self.assertEqual(envelope.data.base_data.metrics[0].value, 123) + self.assertEqual( + 
envelope.data.base_data.properties["environment"], "staging" + ) + self.assertIsNotNone(envelope.tags["ai.cloud.role"]) + self.assertIsNotNone(envelope.tags["ai.cloud.roleInstance"]) + self.assertIsNotNone(envelope.tags["ai.device.id"]) + self.assertIsNotNone(envelope.tags["ai.device.locale"]) + self.assertIsNotNone(envelope.tags["ai.device.osVersion"]) + self.assertIsNotNone(envelope.tags["ai.device.type"]) + self.assertIsNotNone(envelope.tags["ai.internal.sdkVersion"]) + + def test_observer_to_envelope(self): + aggregator = ObserverAggregator() + aggregator.update(123) + aggregator.take_checkpoint() + record = MetricRecord(aggregator, self._test_label_set, self._test_obs) + exporter = self._exporter + envelope = exporter._metric_to_envelope(record) + self.assertIsInstance(envelope, Envelope) + self.assertEqual(envelope.ver, 1) + self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric") + # TODO: implement last updated timestamp for observer + # self.assertEqual( + # envelope.time, + # ns_to_iso_str( + # record.metric.bind( + # record.label_set + # ).last_update_timestamp + # ), + # ) + self.assertEqual(envelope.sample_rate, None) + self.assertEqual(envelope.seq, None) + self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab") + self.assertEqual(envelope.flags, None) + + self.assertIsInstance(envelope.data, Data) + self.assertIsInstance(envelope.data.base_data, MetricData) + self.assertEqual(envelope.data.base_data.ver, 2) + self.assertEqual(len(envelope.data.base_data.metrics), 1) + self.assertIsInstance(envelope.data.base_data.metrics[0], DataPoint) + self.assertEqual(envelope.data.base_data.metrics[0].ns, "testdesc") + self.assertEqual(envelope.data.base_data.metrics[0].name, "testname") + self.assertEqual(envelope.data.base_data.metrics[0].value, 123) + self.assertEqual( + envelope.data.base_data.properties["environment"], "staging" + ) + self.assertIsNotNone(envelope.tags["ai.cloud.role"]) + 
self.assertIsNotNone(envelope.tags["ai.cloud.roleInstance"]) + self.assertIsNotNone(envelope.tags["ai.device.id"]) + self.assertIsNotNone(envelope.tags["ai.device.locale"]) + self.assertIsNotNone(envelope.tags["ai.device.osVersion"]) + self.assertIsNotNone(envelope.tags["ai.device.type"]) + self.assertIsNotNone(envelope.tags["ai.internal.sdkVersion"]) + + def test_observer_to_envelope_value_none(self): + aggregator = ObserverAggregator() + aggregator.update(None) + aggregator.take_checkpoint() + record = MetricRecord(aggregator, self._test_label_set, self._test_obs) + exporter = self._exporter + envelope = exporter._metric_to_envelope(record) + self.assertIsInstance(envelope, Envelope) + self.assertEqual(envelope.ver, 1) + self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric") + # TODO: implement last updated timestamp for observer + # self.assertEqual( + # envelope.time, + # ns_to_iso_str( + # record.metric.bind( + # record.label_set + # ).last_update_timestamp + # ), + # ) + self.assertEqual(envelope.sample_rate, None) + self.assertEqual(envelope.seq, None) + self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab") + self.assertEqual(envelope.flags, None) + + self.assertIsInstance(envelope.data, Data) + self.assertIsInstance(envelope.data.base_data, MetricData) + self.assertEqual(envelope.data.base_data.ver, 2) + self.assertEqual(len(envelope.data.base_data.metrics), 1) + self.assertIsInstance(envelope.data.base_data.metrics[0], DataPoint) + self.assertEqual(envelope.data.base_data.metrics[0].ns, "testdesc") + self.assertEqual(envelope.data.base_data.metrics[0].name, "testname") + self.assertEqual(envelope.data.base_data.metrics[0].value, 0) + self.assertEqual( + envelope.data.base_data.properties["environment"], "staging" + ) + self.assertIsNotNone(envelope.tags["ai.cloud.role"]) + self.assertIsNotNone(envelope.tags["ai.cloud.roleInstance"]) + self.assertIsNotNone(envelope.tags["ai.device.id"]) + 
self.assertIsNotNone(envelope.tags["ai.device.locale"]) + self.assertIsNotNone(envelope.tags["ai.device.osVersion"]) + self.assertIsNotNone(envelope.tags["ai.device.type"]) + self.assertIsNotNone(envelope.tags["ai.internal.sdkVersion"]) + + @mock.patch("azure_monitor.export.metrics.logger") + def test_measure_to_envelope(self, logger_mock): + aggregator = MinMaxSumCountAggregator() + aggregator.update(123) + aggregator.take_checkpoint() + record = MetricRecord( + aggregator, self._test_label_set, self._test_measure + ) + exporter = self._exporter + envelope = exporter._metric_to_envelope(record) + self.assertIsInstance(envelope, Envelope) + self.assertEqual(envelope.ver, 1) + self.assertEqual(envelope.name, "Microsoft.ApplicationInsights.Metric") + self.assertEqual( + envelope.time, + ns_to_iso_str( + record.metric.bind(record.label_set).last_update_timestamp + ), + ) + self.assertEqual(envelope.sample_rate, None) + self.assertEqual(envelope.seq, None) + self.assertEqual(envelope.ikey, "1234abcd-5678-4efa-8abc-1234567890ab") + self.assertEqual(envelope.flags, None) + + self.assertIsInstance(envelope.data, Data) + self.assertIsInstance(envelope.data.base_data, MetricData) + self.assertEqual(envelope.data.base_data.ver, 2) + self.assertEqual(len(envelope.data.base_data.metrics), 1) + self.assertIsInstance(envelope.data.base_data.metrics[0], DataPoint) + self.assertEqual(envelope.data.base_data.metrics[0].ns, "testdesc") + self.assertEqual(envelope.data.base_data.metrics[0].name, "testname") + self.assertEqual(envelope.data.base_data.metrics[0].value, 0) + self.assertEqual( + envelope.data.base_data.properties["environment"], "staging" + ) + self.assertIsNotNone(envelope.tags["ai.cloud.role"]) + self.assertIsNotNone(envelope.tags["ai.cloud.roleInstance"]) + self.assertIsNotNone(envelope.tags["ai.device.id"]) + self.assertIsNotNone(envelope.tags["ai.device.locale"]) + self.assertIsNotNone(envelope.tags["ai.device.osVersion"]) + 
self.assertIsNotNone(envelope.tags["ai.device.type"]) + self.assertIsNotNone(envelope.tags["ai.internal.sdkVersion"]) + self.assertEqual(logger_mock.warning.called, True) diff --git a/azure_monitor/tests/test_base_exporter.py b/azure_monitor/tests/test_base_exporter.py new file mode 100644 index 0000000..8cbb6ab --- /dev/null +++ b/azure_monitor/tests/test_base_exporter.py @@ -0,0 +1,372 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import json +import os +import shutil +import unittest +from unittest import mock + +from opentelemetry.sdk.metrics.export import MetricsExportResult +from opentelemetry.sdk.trace.export import SpanExportResult + +from azure_monitor.export import ( + BaseExporter, + ExportResult, + get_metrics_export_result, + get_trace_export_result, +) +from azure_monitor.options import ExporterOptions +from azure_monitor.protocol import Data, Envelope + +TEST_FOLDER = os.path.abspath(".test.exporter.base") +STORAGE_PATH = os.path.join(TEST_FOLDER) + + +# pylint: disable=invalid-name +def setUpModule(): + os.makedirs(TEST_FOLDER) + + +# pylint: disable=invalid-name +def tearDownModule(): + shutil.rmtree(TEST_FOLDER) + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + + return func + + +# pylint: disable=W0212 +# pylint: disable=R0904 +class TestBaseExporter(unittest.TestCase): + @classmethod + def setUpClass(cls): + os.environ[ + "APPINSIGHTS_INSTRUMENTATIONKEY" + ] = "1234abcd-5678-4efa-8abc-1234567890ab" + cls._base = BaseExporter(storage_path=STORAGE_PATH) + + def setUp(self): + for filename in os.listdir(STORAGE_PATH): + file_path = os.path.join(STORAGE_PATH, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except OSError as e: + print("Failed to delete %s. 
Reason: %s" % (file_path, e)) + self._base.clear_telemetry_processors() + + def test_constructor(self): + """Test the constructor.""" + base = BaseExporter( + instrumentation_key="4321abcd-5678-4efa-8abc-1234567890ab", + storage_maintenance_period=2, + storage_max_size=3, + storage_path=os.path.join(TEST_FOLDER, self.id()), + storage_retention_period=4, + timeout=5, + ) + self.assertIsInstance(base.options, ExporterOptions) + self.assertEqual( + base.options.instrumentation_key, + "4321abcd-5678-4efa-8abc-1234567890ab", + ) + self.assertEqual(base.options.storage_maintenance_period, 2) + self.assertEqual(base.options.storage_max_size, 3) + self.assertEqual(base.options.storage_retention_period, 4) + self.assertEqual(base.options.timeout, 5) + self.assertEqual( + base.options.storage_path, os.path.join(TEST_FOLDER, self.id()) + ) + + def test_constructor_wrong_options(self): + """Test the constructor with wrong options.""" + with self.assertRaises(TypeError): + BaseExporter(something_else=6) + + def test_telemetry_processor_add(self): + base = self._base + base.add_telemetry_processor(lambda: True) + self.assertEqual(len(base._telemetry_processors), 1) + + def test_telemetry_processor_clear(self): + base = self._base + base.add_telemetry_processor(lambda: True) + self.assertEqual(len(base._telemetry_processors), 1) + base.clear_telemetry_processors() + self.assertEqual(len(base._telemetry_processors), 0) + + def test_telemetry_processor_apply(self): + base = self._base + + def callback_function(envelope): + envelope.data.base_type += "_world" + + base.add_telemetry_processor(callback_function) + envelope = Envelope(data=Data(base_type="type1")) + base._apply_telemetry_processors([envelope]) + self.assertEqual(envelope.data.base_type, "type1_world") + + def test_telemetry_processor_apply_multiple(self): + base = self._base + base._telemetry_processors = [] + + def callback_function(envelope): + envelope.data.base_type += "_world" + + def callback_function2(envelope): 
+ envelope.data.base_type += "_world2" + + base.add_telemetry_processor(callback_function) + base.add_telemetry_processor(callback_function2) + envelope = Envelope(data=Data(base_type="type1")) + base._apply_telemetry_processors([envelope]) + self.assertEqual(envelope.data.base_type, "type1_world_world2") + + def test_telemetry_processor_apply_exception(self): + base = self._base + + def callback_function(envelope): + raise ValueError() + + def callback_function2(envelope): + envelope.data.base_type += "_world2" + + base.add_telemetry_processor(callback_function) + base.add_telemetry_processor(callback_function2) + envelope = Envelope(data=Data(base_type="type1")) + base._apply_telemetry_processors([envelope]) + self.assertEqual(envelope.data.base_type, "type1_world2") + + def test_telemetry_processor_apply_not_accepted(self): + base = self._base + + def callback_function(envelope): + return envelope.data.base_type == "type2" + + base.add_telemetry_processor(callback_function) + envelope = Envelope(data=Data(base_type="type1")) + envelope2 = Envelope(data=Data(base_type="type2")) + envelopes = base._apply_telemetry_processors([envelope, envelope2]) + self.assertEqual(len(envelopes), 1) + self.assertEqual(envelopes[0].data.base_type, "type2") + + def test_transmission_nothing(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + with mock.patch("requests.post") as post: + post.return_value = None + exporter._transmit_from_storage() + + def test_transmit_request_exception(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post", throw(Exception)): + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + + @mock.patch("requests.post", return_value=mock.Mock()) + def 
test_transmission_lease_failure(self, requests_mock): + requests_mock.return_value = MockResponse(200, "unknown") + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch( + "azure_monitor.storage.LocalFileBlob.lease" + ) as lease: # noqa: E501 + lease.return_value = False + exporter._transmit_from_storage() + self.assertTrue(exporter.storage.get()) + + def test_(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse(200, None) + del post.return_value.text + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_transmission_200(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse(200, "unknown") + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_transmission_206(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse(206, "unknown") + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + + def test_transmission_206_500(self): + exporter = 
BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + test_envelope = Envelope(name="testEnvelope") + envelopes_to_export = map( + lambda x: x.to_dict(), + tuple([Envelope(), Envelope(), test_envelope]), + ) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse( + 206, + json.dumps( + { + "itemsReceived": 5, + "itemsAccepted": 3, + "errors": [ + {"index": 0, "statusCode": 400, "message": ""}, + { + "index": 2, + "statusCode": 500, + "message": "Internal Server Error", + }, + ], + } + ), + ) + exporter._transmit_from_storage() + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + self.assertEqual( + exporter.storage.get().get()[0]["name"], "testEnvelope" + ) + + def test_transmission_206_no_retry(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse( + 206, + json.dumps( + { + "itemsReceived": 3, + "itemsAccepted": 2, + "errors": [ + {"index": 0, "statusCode": 400, "message": ""} + ], + } + ), + ) + exporter._transmit_from_storage() + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_transmission_206_bogus(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse( + 206, + json.dumps( + { + "itemsReceived": 5, + "itemsAccepted": 3, + "errors": [{"foo": 0, "bar": 1}], + } + ), + ) + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_transmission_400(self): + exporter = BaseExporter( + 
storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse(400, "{}") + exporter._transmit_from_storage() + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_transmission_500(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + envelopes_to_export = map(lambda x: x.to_dict(), tuple([Envelope()])) + exporter.storage.put(envelopes_to_export) + with mock.patch("requests.post") as post: + post.return_value = MockResponse(500, "{}") + exporter._transmit_from_storage() + self.assertIsNone(exporter.storage.get()) + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + + def test_transmission_empty(self): + exporter = BaseExporter( + storage_path=os.path.join(TEST_FOLDER, self.id()) + ) + status = exporter._transmit([]) + self.assertEqual(status, ExportResult.SUCCESS) + + def test_get_trace_export_result(self): + self.assertEqual( + get_trace_export_result(ExportResult.SUCCESS), + SpanExportResult.SUCCESS, + ) + self.assertEqual( + get_trace_export_result(ExportResult.FAILED_NOT_RETRYABLE), + SpanExportResult.FAILED_NOT_RETRYABLE, + ) + self.assertEqual( + get_trace_export_result(ExportResult.FAILED_RETRYABLE), + SpanExportResult.FAILED_RETRYABLE, + ) + self.assertEqual(get_trace_export_result(None), None) + + def test_get_metrics_export_result(self): + self.assertEqual( + get_metrics_export_result(ExportResult.SUCCESS), + MetricsExportResult.SUCCESS, + ) + self.assertEqual( + get_metrics_export_result(ExportResult.FAILED_NOT_RETRYABLE), + MetricsExportResult.FAILED_NOT_RETRYABLE, + ) + self.assertEqual( + get_metrics_export_result(ExportResult.FAILED_RETRYABLE), + MetricsExportResult.FAILED_RETRYABLE, + ) + self.assertEqual(get_metrics_export_result(None), None) + + +class MockResponse: + def __init__(self, 
status_code, text): + self.status_code = status_code + self.text = text diff --git a/azure_monitor/tests/test_options.py b/azure_monitor/tests/test_options.py new file mode 100644 index 0000000..89b4368 --- /dev/null +++ b/azure_monitor/tests/test_options.py @@ -0,0 +1,270 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import os +import unittest + +from azure_monitor.options import ExporterOptions + + +# pylint: disable=too-many-public-methods +class TestOptions(unittest.TestCase): + def setUp(self): + os.environ.clear() + self._valid_instrumentation_key = ( + "1234abcd-5678-4efa-8abc-1234567890ab" + ) + + def test_validate_instrumentation_key(self): + options = ExporterOptions( + instrumentation_key=self._valid_instrumentation_key + ) + self.assertEqual( + options.instrumentation_key, self._valid_instrumentation_key + ) + + def test_invalid_key_none(self): + self.assertRaises( + ValueError, lambda: ExporterOptions(instrumentation_key=None) + ) + + def test_invalid_key_empty(self): + self.assertRaises( + ValueError, lambda: ExporterOptions(instrumentation_key="") + ) + + def test_invalid_key_prefix(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="test1234abcd-5678-4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_suffix(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-4efa-8abc-1234567890abtest" + ), + ) + + def test_invalid_key_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-4efa-8abc-12234567890ab" + ), + ) + + def test_invalid_key_dashes(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcda5678-4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section1_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + 
instrumentation_key="1234abcda-678-4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section2_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-678-a4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section3_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-6789-4ef-8cabc-1234567890ab" + ), + ) + + def test_invalid_key_section4_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-678-4efa-8bc-11234567890ab" + ), + ) + + def test_invalid_key_section5_length(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="234abcd-678-4efa-8abc-11234567890ab" + ), + ) + + def test_invalid_key_section1_hex(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="x234abcd-5678-4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section2_hex(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-x678-4efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section3_hex(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-4xfa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section4_hex(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-4xfa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_section5_hex(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-4xfa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_version(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + instrumentation_key="1234abcd-5678-6efa-8abc-1234567890ab" + ), + ) + + def test_invalid_key_variant(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + 
instrumentation_key="1234abcd-5678-4efa-2abc-1234567890ab" + ), + ) + + def test_process_options_ikey_code_cs(self): + os.environ[ + "APPLICATIONINSIGHTS_CONNECTION_STRING" + ] = "Authorization=ikey;InstrumentationKey=789" + os.environ["APPINSIGHTS_INSTRUMENTATIONKEY"] = "101112" + options = ExporterOptions( + connection_string="Authorization=ikey;InstrumentationKey=" + + self._valid_instrumentation_key, + instrumentation_key="456", + ) + self.assertEqual( + options.instrumentation_key, self._valid_instrumentation_key + ) + + def test_process_options_ikey_code_ikey(self): + os.environ[ + "APPLICATIONINSIGHTS_CONNECTION_STRING" + ] = "Authorization=ikey;InstrumentationKey=789" + os.environ["APPINSIGHTS_INSTRUMENTATIONKEY"] = "101112" + options = ExporterOptions( + connection_string=None, + instrumentation_key=self._valid_instrumentation_key, + ) + self.assertEqual( + options.instrumentation_key, self._valid_instrumentation_key + ) + + def test_process_options_ikey_env_cs(self): + os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"] = ( + "Authorization=ikey;InstrumentationKey=" + + self._valid_instrumentation_key + ) + os.environ["APPINSIGHTS_INSTRUMENTATIONKEY"] = "101112" + options = ExporterOptions( + connection_string=None, instrumentation_key=None + ) + self.assertEqual( + options.instrumentation_key, self._valid_instrumentation_key + ) + + def test_process_options_ikey_env_ikey(self): + os.environ[ + "APPINSIGHTS_INSTRUMENTATIONKEY" + ] = self._valid_instrumentation_key + options = ExporterOptions( + connection_string=None, instrumentation_key=None + ) + self.assertEqual( + options.instrumentation_key, self._valid_instrumentation_key + ) + + def test_process_options_endpoint_code_cs(self): + os.environ[ + "APPLICATIONINSIGHTS_CONNECTION_STRING" + ] = "Authorization=ikey;IngestionEndpoint=456;InstrumentationKey=" + options = ExporterOptions( + connection_string="Authorization=ikey;IngestionEndpoint=123", + instrumentation_key=self._valid_instrumentation_key, + ) 
+ self.assertEqual(options.endpoint, "123/v2/track") + + def test_process_options_endpoint_env_cs(self): + os.environ[ + "APPLICATIONINSIGHTS_CONNECTION_STRING" + ] = "Authorization=ikey;IngestionEndpoint=456" + options = ExporterOptions( + connection_string=None, + instrumentation_key=self._valid_instrumentation_key, + ) + self.assertEqual(options.endpoint, "456/v2/track") + + def test_process_options_endpoint_default(self): + options = ExporterOptions( + connection_string=None, + instrumentation_key=self._valid_instrumentation_key, + ) + self.assertEqual( + options.endpoint, "https://dc.services.visualstudio.com/v2/track" + ) + + def test_parse_connection_string_invalid(self): + self.assertRaises( + ValueError, lambda: ExporterOptions(connection_string="asd") + ) + + def test_parse_connection_string_invalid_auth(self): + self.assertRaises( + ValueError, + lambda: ExporterOptions( + connection_string="Authorization=asd", + instrumentation_key=self._valid_instrumentation_key, + ), + ) + + def test_parse_connection_string_suffix(self): + options = ExporterOptions( + connection_string="Authorization=ikey;EndpointSuffix=123;Location=US", + instrumentation_key=self._valid_instrumentation_key, + ) + self.assertEqual(options.endpoint, "https://US.dc.123/v2/track") + + def test_parse_connection_string_suffix_no_location(self): + options = ExporterOptions( + connection_string="Authorization=ikey;EndpointSuffix=123", + instrumentation_key=self._valid_instrumentation_key, + ) + self.assertEqual(options.endpoint, "https://dc.123/v2/track") diff --git a/azure_monitor/tests/test_protocol.py b/azure_monitor/tests/test_protocol.py new file mode 100644 index 0000000..f4e6d90 --- /dev/null +++ b/azure_monitor/tests/test_protocol.py @@ -0,0 +1,120 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 

import unittest

from azure_monitor import protocol


class TestProtocol(unittest.TestCase):
    """Checks default field values and ``to_dict`` serialization of the
    Application Insights envelope protocol data classes."""

    def test_object(self):
        # An empty BaseObject represents itself as an empty dict.
        data = protocol.BaseObject()
        self.assertEqual(repr(data), "{}")

    def test_data(self):
        data = protocol.Data()
        self.assertIsNone(data.base_data)
        self.assertIsNone(data.base_type)

    def test_data_point(self):
        data = protocol.DataPoint()
        self.assertEqual(data.ns, "")

    def test_envelope(self):
        data = protocol.Envelope()
        self.assertEqual(data.ver, 1)

    def test_event(self):
        data = protocol.Event()
        self.assertEqual(data.ver, 2)

    def test_event_to_dict(self):
        data = protocol.Event()
        to_dict = {
            "ver": 2,
            "name": "",
            "properties": None,
            "measurements": None,
        }
        self.assertEqual(data.to_dict(), to_dict)

    def test_exception_details(self):
        data = protocol.ExceptionDetails()
        self.assertEqual(data.id, None)

    def test_exception_details_to_dict(self):
        data = protocol.ExceptionDetails()
        to_dict = {
            "id": None,
            "outerId": None,
            "typeName": None,
            "message": None,
            # NOTE(review): "hasFullStack " carries a trailing space —
            # presumably this mirrors the key emitted by
            # protocol.ExceptionDetails; confirm whether the space is a bug
            # in the protocol module rather than intended.
            "hasFullStack ": None,
            "stack": None,
            "parsedStack": None,
        }
        self.assertEqual(data.to_dict(), to_dict)

    def test_exception_data(self):
        data = protocol.ExceptionData()
        self.assertEqual(data.ver, 2)

    def test_exception_data_details(self):
        details = protocol.ExceptionDetails()
        data = protocol.ExceptionData(exceptions=[details])
        self.assertEqual(len(data.exceptions), 1)

    def test_exception_data_to_dict(self):
        data = protocol.ExceptionData()
        to_dict = {
            "ver": 2,
            "exceptions": [],
            "severityLevel": None,
            "problemId": None,
            "properties": None,
            "measurements": None,
        }
        self.assertEqual(data.to_dict(), to_dict)

    def test_message(self):
        data = protocol.Message()
        self.assertEqual(data.ver, 2)

    def test_message_to_dict(self):
        data = protocol.Message()
        to_dict = {
            "ver": 2,
            "message": "",
            "severityLevel": None,
            "properties": None,
            "measurements": None,
        }
        self.assertEqual(data.to_dict(), to_dict)

    def test_metric_data(self):
        data = protocol.MetricData()
        self.assertEqual(data.ver, 2)

    def test_remote_dependency(self):
        data = protocol.RemoteDependency()
        self.assertEqual(data.ver, 2)

    def test_request(self):
        data = protocol.Request()
        self.assertEqual(data.ver, 2)

    def test_request_to_dict(self):
        data = protocol.Request()
        to_dict = {
            "ver": 2,
            "id": "",
            "duration": "",
            "responseCode": "",
            "success": True,
            "source": None,
            "name": None,
            "url": None,
            "properties": None,
            "measurements": None,
        }
        self.assertEqual(data.to_dict(), to_dict)
diff --git a/azure_monitor/tests/test_storage.py b/azure_monitor/tests/test_storage.py
new file mode 100644
index 0000000..a24b49a
--- /dev/null
+++ b/azure_monitor/tests/test_storage.py
@@ -0,0 +1,134 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import os
import shutil
import unittest
from unittest import mock

from azure_monitor.storage import (
    LocalFileBlob,
    LocalFileStorage,
    _now,
    _seconds,
)

# Scratch directory shared by every test in this module.
TEST_FOLDER = os.path.abspath(".test.storage")


# pylint: disable=invalid-name
def setUpModule():
    os.makedirs(TEST_FOLDER)


# pylint: disable=invalid-name
def tearDownModule():
    shutil.rmtree(TEST_FOLDER)


def throw(exc_type, *args, **kwargs):
    # Returns a callable that raises exc_type, for use as a mock
    # side_effect.
    def func(*_args, **_kwargs):
        raise exc_type(*args, **kwargs)

    return func


class TestLocalFileBlob(unittest.TestCase):
    def test_delete(self):
        # Deleting a nonexistent blob only succeeds with silent=True.
        blob = LocalFileBlob(os.path.join(TEST_FOLDER, "foobar"))
        blob.delete(silent=True)
        self.assertRaises(Exception, blob.delete)
        self.assertRaises(Exception, lambda: blob.delete(silent=False))

    def test_get(self):
        # Reading a nonexistent blob returns None only with silent=True.
        blob = LocalFileBlob(os.path.join(TEST_FOLDER, "foobar"))
        self.assertIsNone(blob.get(silent=True))
        self.assertRaises(Exception, blob.get)
        self.assertRaises(Exception, lambda: blob.get(silent=False))

    def test_put_error(self):
        blob =
LocalFileBlob(os.path.join(TEST_FOLDER, "foobar")) + with mock.patch("os.rename", side_effect=throw(Exception)): + self.assertRaises(Exception, lambda: blob.put([1, 2, 3])) + + def test_put_without_lease(self): + blob = LocalFileBlob(os.path.join(TEST_FOLDER, "foobar.blob")) + test_input = (1, 2, 3) + blob.delete(silent=True) + blob.put(test_input) + self.assertEqual(blob.get(), test_input) + + def test_put_with_lease(self): + blob = LocalFileBlob(os.path.join(TEST_FOLDER, "foobar.blob")) + test_input = (1, 2, 3) + blob.delete(silent=True) + blob.put(test_input, lease_period=0.01) + blob.lease(0.01) + self.assertEqual(blob.get(), test_input) + + def test_lease_error(self): + blob = LocalFileBlob(os.path.join(TEST_FOLDER, "foobar.blob")) + blob.delete(silent=True) + self.assertEqual(blob.lease(0.01), None) + + +# pylint: disable=protected-access +class TestLocalFileStorage(unittest.TestCase): + def test_get_nothing(self): + with LocalFileStorage(os.path.join(TEST_FOLDER, "test", "a")) as stor: + pass + with LocalFileStorage(os.path.join(TEST_FOLDER, "test")) as stor: + self.assertIsNone(stor.get()) + + def test_get(self): + now = _now() + with LocalFileStorage(os.path.join(TEST_FOLDER, "foo")) as stor: + stor.put((1, 2, 3), lease_period=10) + with mock.patch("azure_monitor.storage._now") as m: + m.return_value = now - _seconds(30 * 24 * 60 * 60) + stor.put((1, 2, 3)) + stor.put((1, 2, 3), lease_period=10) + with mock.patch("os.rename"): + stor.put((1, 2, 3)) + with mock.patch("os.rename"): + stor.put((1, 2, 3)) + with mock.patch("os.remove", side_effect=throw(Exception)): + with mock.patch("os.rename", side_effect=throw(Exception)): + self.assertIsNone(stor.get()) + self.assertIsNone(stor.get()) + + def test_put(self): + test_input = (1, 2, 3) + with LocalFileStorage(os.path.join(TEST_FOLDER, "bar")) as stor: + stor.put(test_input) + self.assertEqual(stor.get().get(), test_input) + with LocalFileStorage(os.path.join(TEST_FOLDER, "bar")) as stor: + 
self.assertEqual(stor.get().get(), test_input) + with mock.patch("os.rename", side_effect=throw(Exception)): + self.assertIsNone(stor.put(test_input, silent=True)) + self.assertRaises(Exception, lambda: stor.put(test_input)) + + def test_maintanence_routine(self): + with mock.patch("os.makedirs") as m: + m.return_value = None + self.assertRaises( + Exception, + lambda: LocalFileStorage(os.path.join(TEST_FOLDER, "baz")), + ) + with mock.patch("os.makedirs", side_effect=throw(Exception)): + self.assertRaises( + Exception, + lambda: LocalFileStorage(os.path.join(TEST_FOLDER, "baz")), + ) + with mock.patch("os.listdir", side_effect=throw(Exception)): + self.assertRaises( + Exception, + lambda: LocalFileStorage(os.path.join(TEST_FOLDER, "baz")), + ) + with LocalFileStorage(os.path.join(TEST_FOLDER, "baz")) as stor: + with mock.patch("os.listdir", side_effect=throw(Exception)): + stor._maintenance_routine(silent=True) + self.assertRaises(Exception, stor._maintenance_routine) + with mock.patch("os.path.isdir", side_effect=throw(Exception)): + stor._maintenance_routine(silent=True) + self.assertRaises(Exception, stor._maintenance_routine) diff --git a/azure_monitor/tests/test_utils.py b/azure_monitor/tests/test_utils.py new file mode 100644 index 0000000..cac3bc8 --- /dev/null +++ b/azure_monitor/tests/test_utils.py @@ -0,0 +1,24 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 

import os
import unittest

from azure_monitor import utils


class TestUtils(unittest.TestCase):
    """Tests for the duration-formatting helper in azure_monitor.utils."""

    def setUp(self):
        os.environ.clear()
        # NOTE(review): this fixture is not referenced by any test in this
        # class — candidate for removal.
        self._valid_instrumentation_key = (
            "1234abcd-5678-4efa-8abc-1234567890ab"
        )

    def test_nanoseconds_to_duration(self):
        # ns_to_duration renders nanoseconds as "D.HH:MM:SS.mmm"
        # (days, hours, minutes, seconds, milliseconds).
        ns_to_duration = utils.ns_to_duration
        self.assertEqual(ns_to_duration(0), "0.00:00:00.000")
        self.assertEqual(ns_to_duration(1000000), "0.00:00:00.001")
        self.assertEqual(ns_to_duration(1000000000), "0.00:00:01.000")
        self.assertEqual(ns_to_duration(60 * 1000000000), "0.00:01:00.000")
        self.assertEqual(ns_to_duration(3600 * 1000000000), "0.01:00:00.000")
        self.assertEqual(ns_to_duration(86400 * 1000000000), "1.00:00:00.000")
diff --git a/azure_monitor/tests/trace/__init__.py b/azure_monitor/tests/trace/__init__.py
new file mode 100644
index 0000000..5b7f7a9
--- /dev/null
+++ b/azure_monitor/tests/trace/__init__.py
@@ -0,0 +1,2 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
diff --git a/azure_monitor/tests/trace/test_trace.py b/azure_monitor/tests/trace/test_trace.py
new file mode 100644
index 0000000..3e3f161
--- /dev/null
+++ b/azure_monitor/tests/trace/test_trace.py
@@ -0,0 +1,899 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
+ + +import json +import os +import shutil +import unittest +from unittest import mock + +# pylint: disable=import-error +from opentelemetry.sdk.trace import Span +from opentelemetry.sdk.trace.export import SpanExportResult +from opentelemetry.trace import Link, SpanContext, SpanKind +from opentelemetry.trace.status import Status, StatusCanonicalCode + +from azure_monitor.export import ExportResult +from azure_monitor.export.trace import AzureMonitorSpanExporter +from azure_monitor.options import ExporterOptions + +TEST_FOLDER = os.path.abspath(".test.exporter.trace") +STORAGE_PATH = os.path.join(TEST_FOLDER) + + +# pylint: disable=invalid-name +def setUpModule(): + os.makedirs(TEST_FOLDER) + + +# pylint: disable=invalid-name +def tearDownModule(): + shutil.rmtree(TEST_FOLDER) + + +def throw(exc_type, *args, **kwargs): + def func(*_args, **_kwargs): + raise exc_type(*args, **kwargs) + + return func + + +# pylint: disable=import-error +# pylint: disable=protected-access +# pylint: disable=too-many-lines +class TestAzureExporter(unittest.TestCase): + @classmethod + def setUpClass(cls): + os.environ.clear() + os.environ[ + "APPINSIGHTS_INSTRUMENTATIONKEY" + ] = "1234abcd-5678-4efa-8abc-1234567890ab" + cls._exporter = AzureMonitorSpanExporter(storage_path=STORAGE_PATH) + + def setUp(self): + for filename in os.listdir(STORAGE_PATH): + file_path = os.path.join(STORAGE_PATH, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + except OSError as e: + print("Failed to delete %s. 
Reason: %s" % (file_path, e)) + + def test_constructor(self): + """Test the constructor.""" + exporter = AzureMonitorSpanExporter( + instrumentation_key="4321abcd-5678-4efa-8abc-1234567890ab", + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + self.assertIsInstance(exporter.options, ExporterOptions) + self.assertEqual( + exporter.options.instrumentation_key, + "4321abcd-5678-4efa-8abc-1234567890ab", + ) + self.assertEqual( + exporter.options.storage_path, os.path.join(TEST_FOLDER, self.id()) + ) + + def test_export_empty(self): + exporter = self._exporter + exporter.export([]) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + def test_export_failure(self): + exporter = self._exporter + with mock.patch( + "azure_monitor.export.trace.AzureMonitorSpanExporter._transmit" + ) as transmit: # noqa: E501 + test_span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557338, + ), + ) + test_span.start() + test_span.end() + transmit.return_value = ExportResult.FAILED_RETRYABLE + exporter.export([test_span]) + self.assertEqual(len(os.listdir(exporter.storage.path)), 1) + self.assertIsNone(exporter.storage.get()) + + def test_export_success(self): + exporter = self._exporter + test_span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557338, + ), + ) + test_span.start() + test_span.end() + with mock.patch( + "azure_monitor.export.trace.AzureMonitorSpanExporter._transmit" + ) as transmit: # noqa: E501 + transmit.return_value = ExportResult.SUCCESS + storage_mock = mock.Mock() + exporter._transmit_from_storage = storage_mock + exporter.export([test_span]) + self.assertEqual(storage_mock.call_count, 1) + self.assertEqual(len(os.listdir(exporter.storage.path)), 0) + + @mock.patch("azure_monitor.export.trace.logger") + def test_export_exception(self, logger_mock): + test_span = Span( + name="test", + 
context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557338, + ), + ) + test_span.start() + test_span.end() + exporter = self._exporter + with mock.patch( + "azure_monitor.export.trace.AzureMonitorSpanExporter._transmit", + throw(Exception), + ): # noqa: E501 + result = exporter.export([test_span]) + self.assertEqual(result, SpanExportResult.FAILED_NOT_RETRYABLE) + self.assertEqual(logger_mock.exception.called, True) + + def test_export_not_retryable(self): + exporter = self._exporter + test_span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557338, + ), + ) + test_span.start() + test_span.end() + with mock.patch( + "azure_monitor.export.trace.AzureMonitorSpanExporter._transmit" + ) as transmit: # noqa: E501 + transmit.return_value = ExportResult.FAILED_NOT_RETRYABLE + result = exporter.export([test_span]) + self.assertEqual(result, SpanExportResult.FAILED_NOT_RETRYABLE) + + def test_span_to_envelope_none(self): + exporter = self._exporter + self.assertIsNone(exporter._span_to_envelope(None)) + + # pylint: disable=too-many-statements + def test_span_to_envelope(self): + exporter = AzureMonitorSpanExporter( + instrumentation_key="12345678-1234-5678-abcd-12345678abcd", + storage_path=os.path.join(TEST_FOLDER, self.id()), + ) + + parent_span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557338, + ), + ) + + start_time = 1575494316027613500 + end_time = start_time + 1001000000 + + # SpanKind.CLIENT HTTP + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 200, + }, + events=None, + 
links=[], + kind=SpanKind.CLIENT, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.RemoteDependency" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.name, "GET//wiki/Rabbit") + self.assertEqual( + envelope.data.base_data.data, + "https://www.wikipedia.org/wiki/Rabbit", + ) + self.assertEqual(envelope.data.base_data.target, "www.wikipedia.org") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.result_code, "200") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.type, "HTTP") + self.assertEqual(envelope.data.base_type, "RemoteDependencyData") + + # SpanKind.CLIENT unknown type + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={}, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.RemoteDependency" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + 
"1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.name, "test") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.type, None) + self.assertEqual(envelope.data.base_type, "RemoteDependencyData") + + # SpanKind.CLIENT missing method + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 200, + }, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.RemoteDependency" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.name, "test") + self.assertEqual( + envelope.data.base_data.data, + "https://www.wikipedia.org/wiki/Rabbit", + ) + self.assertEqual(envelope.data.base_data.target, "www.wikipedia.org") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.result_code, "200") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.type, "HTTP") + self.assertEqual(envelope.data.base_type, "RemoteDependencyData") + 
+ # SpanKind.SERVER HTTP - 200 request + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.path": "/wiki/Rabbit", + "http.route": "/wiki/Rabbit", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 200, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.Request" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual( + envelope.tags["ai.operation.name"], "GET /wiki/Rabbit" + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.response_code, "200") + self.assertEqual(envelope.data.base_data.name, "GET /wiki/Rabbit") + self.assertEqual(envelope.data.base_data.success, True) + self.assertEqual( + envelope.data.base_data.url, + "https://www.wikipedia.org/wiki/Rabbit", + ) + self.assertEqual(envelope.data.base_type, "RequestData") + + # SpanKind.SERVER HTTP - Failed request + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + 
"http.path": "/wiki/Rabbit", + "http.route": "/wiki/Rabbit", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.Request" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual( + envelope.tags["ai.operation.name"], "GET /wiki/Rabbit" + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.response_code, "400") + self.assertEqual(envelope.data.base_data.name, "GET /wiki/Rabbit") + self.assertEqual(envelope.data.base_data.success, False) + self.assertEqual( + envelope.data.base_data.url, + "https://www.wikipedia.org/wiki/Rabbit", + ) + self.assertEqual(envelope.data.base_type, "RequestData") + + # SpanKind.SERVER unknown type + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.path": "/wiki/Rabbit", + "http.route": "/wiki/Rabbit", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + 
envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.Request" + ) + self.assertEqual( + envelope.tags["ai.operation.parentId"], "a6f5d48acb4d31da" + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_type, "RequestData") + + # SpanKind.INTERNAL + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=None, + sampler=None, + trace_config=None, + resource=None, + attributes={"key1": "value1"}, + events=None, + links=[], + kind=SpanKind.INTERNAL, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.ikey, "12345678-1234-5678-abcd-12345678abcd") + self.assertEqual( + envelope.name, "Microsoft.ApplicationInsights.RemoteDependency" + ) + self.assertRaises( + KeyError, lambda: envelope.tags["ai.operation.parentId"] + ) + self.assertEqual( + envelope.tags["ai.operation.id"], + "1bbd944a73a05d89eab5d3740a213ee7", + ) + self.assertEqual(envelope.time, "2019-12-04T21:18:36.027613Z") + self.assertEqual(envelope.data.base_data.name, "test") + self.assertEqual(envelope.data.base_data.duration, "0.00:00:01.001") + self.assertEqual(envelope.data.base_data.id, "a6f5d48acb4d31d9") + self.assertEqual(envelope.data.base_data.type, "InProc") + self.assertEqual(envelope.data.base_type, "RemoteDependencyData") + + # Attributes + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + 
span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 200, + "test": "asd", + }, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(len(envelope.data.base_data.properties), 2) + self.assertEqual( + envelope.data.base_data.properties["component"], "http" + ) + self.assertEqual(envelope.data.base_data.properties["test"], "asd") + + # Links + links = [] + links.append( + Link( + context=SpanContext( + trace_id=36873507687745823477771305566750195432, + span_id=12030755672171557338, + ) + ) + ) + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 200, + }, + events=None, + links=links, + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(len(envelope.data.base_data.properties), 2) + json_dict = json.loads( + envelope.data.base_data.properties["_MS.links"] + )[0] + self.assertEqual(json_dict["id"], "a6f5d48acb4d31da") + + # Status + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": 
"https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 500, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.response_code, "500") + self.assertFalse(envelope.data.base_data.success) + + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 500, + }, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.result_code, "500") + self.assertFalse(envelope.data.base_data.success) + + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.response_code, "0") + self.assertTrue(envelope.data.base_data.success) + + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + 
trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + }, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + span.start(start_time=start_time) + span.end(end_time=end_time) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.result_code, "0") + self.assertTrue(envelope.data.base_data.success) + + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.UNKNOWN) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.response_code, "2") + self.assertFalse(envelope.data.base_data.success) + + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "http", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + }, + events=None, + links=[], + kind=SpanKind.CLIENT, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.UNKNOWN) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.result_code, "2") + self.assertFalse(envelope.data.base_data.success) + + # Server route attribute + span = Span( + name="test", + context=SpanContext( + 
trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "HTTP", + "http.method": "GET", + "http.route": "/wiki/Rabbit", + "http.path": "/wiki/Rabbitz", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + self.assertEqual( + envelope.data.base_data.properties["request.name"], + "GET /wiki/Rabbit", + ) + self.assertEqual( + envelope.data.base_data.properties["request.url"], + "https://www.wikipedia.org/wiki/Rabbit", + ) + + # Server method attribute missing + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "HTTP", + "http.path": "/wiki/Rabbitz", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + self.assertIsNone(envelope.data.base_data.name) + + # Server route attribute missing + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "HTTP", + "http.method": "GET", + "http.path": "/wiki/Rabbitz", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + 
kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + self.assertEqual(envelope.data.base_data.name, "GET") + self.assertEqual( + envelope.data.base_data.properties["request.name"], + "GET /wiki/Rabbitz", + ) + self.assertEqual( + envelope.data.base_data.properties["request.url"], + "https://www.wikipedia.org/wiki/Rabbit", + ) + + # Server route and path attribute missing + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "HTTP", + "http.method": "GET", + "http.url": "https://www.wikipedia.org/wiki/Rabbit", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + self.assertIsNone( + envelope.data.base_data.properties.get("request.name") + ) + self.assertEqual( + envelope.data.base_data.properties["request.url"], + "https://www.wikipedia.org/wiki/Rabbit", + ) + + # Server http.url missing + span = Span( + name="test", + context=SpanContext( + trace_id=36873507687745823477771305566750195431, + span_id=12030755672171557337, + ), + parent=parent_span, + sampler=None, + trace_config=None, + resource=None, + attributes={ + "component": "HTTP", + "http.method": "GET", + "http.route": "/wiki/Rabbit", + "http.path": "/wiki/Rabbitz", + "http.status_code": 400, + }, + events=None, + links=[], + kind=SpanKind.SERVER, + ) + span.start(start_time=start_time) + span.end(end_time=end_time) + span.status = Status(canonical_code=StatusCanonicalCode.OK) + envelope = exporter._span_to_envelope(span) + 
self.assertIsNone(envelope.data.base_data.url) + self.assertIsNone( + envelope.data.base_data.properties.get("request.url") + ) diff --git a/dev-requirements.txt b/dev-requirements.txt new file mode 100644 index 0000000..3ffaba9 --- /dev/null +++ b/dev-requirements.txt @@ -0,0 +1,7 @@ +pylint~=2.3 +flake8~=3.7 +isort~=4.3 +black>=19.3b0,==19.* +mypy==0.740 +pytest!=5.2.3 +pytest-cov>=2.8 diff --git a/docs-requirements.txt b/docs-requirements.txt new file mode 100644 index 0000000..b9ffedb --- /dev/null +++ b/docs-requirements.txt @@ -0,0 +1,8 @@ +sphinx~=2.4 +sphinx-rtd-theme~=0.4 +sphinx-autodoc-typehints~=1.10.2 +# External +opentelemetry-api>= 0.5b0 +opentelemetry-sdk>= 0.5b0 +psutil>= 5.6.3 + diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/azure_monitor/export/export.base-exporter.rst b/docs/azure_monitor/export/export.base-exporter.rst new file mode 100644 index 0000000..12a3fb6 --- /dev/null +++ b/docs/azure_monitor/export/export.base-exporter.rst @@ -0,0 +1,8 @@ +Base Exporter +============= + + + +.. 
autoclass:: azure_monitor.export.BaseExporter + :members: + :undoc-members: diff --git a/docs/azure_monitor/export/export.metrics.rst b/docs/azure_monitor/export/export.metrics.rst new file mode 100644 index 0000000..04d3d26 --- /dev/null +++ b/docs/azure_monitor/export/export.metrics.rst @@ -0,0 +1,9 @@ +Metrics Exporter +================ + + + +.. automodule:: azure_monitor.export.metrics + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/azure_monitor/export/export.options.rst b/docs/azure_monitor/export/export.options.rst new file mode 100644 index 0000000..c54dc5f --- /dev/null +++ b/docs/azure_monitor/export/export.options.rst @@ -0,0 +1,9 @@ +Exporter Options +================ + + + +.. autoclass:: azure_monitor.options.ExporterOptions + :members: + + diff --git a/docs/azure_monitor/export/export.trace.rst b/docs/azure_monitor/export/export.trace.rst new file mode 100644 index 0000000..8761371 --- /dev/null +++ b/docs/azure_monitor/export/export.trace.rst @@ -0,0 +1,9 @@ +Span Exporter +============= + + + +.. automodule:: azure_monitor.export.trace + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/azure_monitor/export/main.rst b/docs/azure_monitor/export/main.rst new file mode 100644 index 0000000..c44ed09 --- /dev/null +++ b/docs/azure_monitor/export/main.rst @@ -0,0 +1,12 @@ +Exporters +========= + + +.. toctree:: + + export.trace + export.metrics + export.options + export.base-exporter + + diff --git a/docs/azure_monitor/sdk/auto-collection/auto-collection.standard-metrics.rst b/docs/azure_monitor/sdk/auto-collection/auto-collection.standard-metrics.rst new file mode 100644 index 0000000..f825c79 --- /dev/null +++ b/docs/azure_monitor/sdk/auto-collection/auto-collection.standard-metrics.rst @@ -0,0 +1,10 @@ +Standard Metrics +================ + + +.. 
automodule:: azure_monitor.sdk.auto_collection + :members: + :undoc-members: + + + diff --git a/docs/azure_monitor/sdk/auto-collection/main.rst b/docs/azure_monitor/sdk/auto-collection/main.rst new file mode 100644 index 0000000..c2bc1ec --- /dev/null +++ b/docs/azure_monitor/sdk/auto-collection/main.rst @@ -0,0 +1,9 @@ +Auto Collection +=============== + + +.. toctree:: + + auto-collection.standard-metrics + + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..c21be60 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,81 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. + +import os +import sys +from os import listdir +from os.path import isdir, join + +source_dirs = [os.path.abspath("../azure_monitor/src/")] + + +sys.path[:0] = source_dirs + + +# -- Project information ----------------------------------------------------- + +project = "OpenTelemetry Azure Monitor Python" +copyright = "2020, Microsoft" +author = "Microsoft" + +# The full version, including alpha/beta/rc tags +release = "0.0.1b" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + # API doc generation + "sphinx.ext.autodoc", + # Support for google-style docstrings + "sphinx.ext.napoleon", + # Infer types from hints instead of docstrings + "sphinx_autodoc_typehints", + # Add links to source from generated docs + "sphinx.ext.viewcode", + # Link to other sphinx docs + "sphinx.ext.intersphinx", + # Add a .nojekyll file to the generated HTML docs + # https://help.github.com/en/articles/files-that-start-with-an-underscore-are-missing + "sphinx.ext.githubpages", + # Support external links to different versions in the Github repo + "sphinx.ext.extlinks", +] + + +intersphinx_mapping = { + "python": ("https://opentelemetry-python.readthedocs.io/en/stable", None) +} + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..e360c22 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,38 @@ + +OpenTelemetry Azure Monitor Python +================================== + +The Python `OpenTelemetry `_ Azure Monitor library. + +.. 
image:: https://img.shields.io/gitter/room/Microsoft/azure-monitor-python + :target: https://gitter.im/Microsoft/azure-monitor-python + :alt: Gitter Chat + + +**Please note** that this library is currently in beta, and shouldn't be +used in production environments. + +Installation +------------ + +The package is available on PyPI, and can installed via pip: + +.. code-block:: sh + + pip install opentelemetry-azure-monitor-exporter + +.. toctree:: + :maxdepth: 1 + :caption: Documentation + :name: documentation + + azure_monitor/export/main + azure_monitor/sdk/auto-collection/main + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..922152e --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a8f43fe --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.black] +line-length = 79 diff --git a/scripts/pylint.sh b/scripts/pylint.sh new file mode 100644 index 0000000..f757fac --- /dev/null +++ b/scripts/pylint.sh @@ -0,0 +1,13 @@ +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. + +set -ev + +# Run pylint on directories +function pylint_dir { + python -m pip install --upgrade pylint + pylint $(find azure_monitor -type f -name "*.py") + return $? +} + +pylint_dir diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..9966fa7 --- /dev/null +++ b/tox.ini @@ -0,0 +1,70 @@ +[tox] +skipsdist = True +skip_missing_interpreters = True +envlist = + py3{4,5,6,7,8}-test-{azure_monitor} + py3{4,5,6,7,8}-coverage + + lint + docs + +[travis] +python = + 3.8: py38, lint, docs + +[testenv] +deps = + -c dev-requirements.txt + test: pytest + coverage: pytest + coverage: pytest-cov + mypy: mypy + +changedir = + test-azure_monitor: azure_monitor/tests + +commands_pre = + python -m pip install -U pip setuptools + test-azure_monitor: pip install {toxinidir}/azure_monitor + coverage: pip install -e {toxinidir}/azure_monitor + +commands = + test: pytest {posargs} + coverage: coverage erase + coverage: pytest --ignore-glob=*/setup.py --cov azure_monitor --cov-append --cov-report term-missing + coverage: coverage report + +[testenv:lint] +basepython: python3.8 +recreate = True +deps = + -c dev-requirements.txt + pylint + flake8 + isort + black + +commands_pre = + pip install ./azure_monitor + +commands = + black . --diff --check + isort --diff --check-only --recursive . + flake8 + bash ./scripts/pylint.sh + +[testenv:docs] +deps = + -c docs-requirements.txt + sphinx + sphinx-rtd-theme + sphinx-autodoc-typehints + # External + opentelemetry-api + opentelemetry-sdk + psutil + +changedir = docs + +commands = + sphinx-build -E -a --keep-going -b html -T . _build/html