bWUp}xZee%_FF+E28Gr&LzQXejWB
zFF+E26#x_I#XCg22?r-%Yj0PYEp7nRKcP5}NPfXW>d
zAQS*X4L}sYk;G^K2?3k{u>_C=$Zl&HU|oxb9nw&M7hvPP797D5^YtB!^1A>lE_VEA
zWtT8;3G)qN)hY)JtNaxtw(!6H4J$lDHv|41;8=iuIsgd)i~wGMGznl`3oC$`4v;qk
yX22l>+=$W4)dl4JFr1FvuXH>&TgG!!vezfc^{4$!^Uogu0000
Date: Thu, 29 Aug 2024 18:53:33 -0700
Subject: [PATCH 10/31] Add attributes to docstring
---
ml_metadata/metadata_store/metadata_store.py | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/ml_metadata/metadata_store/metadata_store.py b/ml_metadata/metadata_store/metadata_store.py
index 96cfff948..9e0f7f7ea 100644
--- a/ml_metadata/metadata_store/metadata_store.py
+++ b/ml_metadata/metadata_store/metadata_store.py
@@ -53,7 +53,13 @@
@enum.unique
class OrderByField(enum.Enum):
- """Defines the available fields to order results in ListOperations."""
+ """Defines the available fields to order results in ListOperations.
+
+ Attributes:
+ CREATE_TIME: ``
+ ID: ``
+ UPDATE_TIME: ``
+ """
CREATE_TIME = (
metadata_store_pb2.ListOperationOptions.OrderByField.Field.CREATE_TIME)
From c3131accdf03e3db7ca3f002beea89d1166f5d59 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Thu, 29 Aug 2024 20:41:40 -0700
Subject: [PATCH 11/31] Temporary fix for unused import
---
ml_metadata/proto/__init__.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/ml_metadata/proto/__init__.py b/ml_metadata/proto/__init__.py
index d033691c6..b1eb7b4b9 100644
--- a/ml_metadata/proto/__init__.py
+++ b/ml_metadata/proto/__init__.py
@@ -35,8 +35,11 @@
MySQLDatabaseConfig,
SqliteMetadataSourceConfig,
)
-import ml_metadata.proto.metadata_store_service_pb2 as _
-import ml_metadata.proto.metadata_store_service_pb2_grpc as _
+from ml_metadata.proto import metadata_store_service_pb2
+from ml_metadata.proto import metadata_store_service_pb2_grpc
+
+del metadata_store_service_pb2
+del metadata_store_service_pb2_grpc
Artifact.__doc__ = """
An artifact represents an input or an output of individual steps in a ML
From 27905cae517829426dbf2ee1048776d23ba3b37e Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Thu, 29 Aug 2024 21:15:48 -0700
Subject: [PATCH 12/31] Fill out Overview section
---
g3doc/api/mlmd/root.md | 24 +++++++++++++++++++++++-
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/g3doc/api/mlmd/root.md b/g3doc/api/mlmd/root.md
index 4ce31d443..4a1890b72 100644
--- a/g3doc/api/mlmd/root.md
+++ b/g3doc/api/mlmd/root.md
@@ -1 +1,23 @@
-# MLMD
+# mlmd
+
+Init module for ML Metadata.
+
+## Modules
+
+[`errors`][ml_metadata.errors] module: Exception types for MLMD errors.
+
+[`proto`][ml_metadata.proto] module: ML Metadata proto module.
+
+## Classes
+
+[`class ListOptions`][ml_metadata.ListOptions]: Defines the available options when listing nodes.
+
+[`class MetadataStore`][ml_metadata.MetadataStore]: A store for the metadata.
+
+[`class OrderByField`][ml_metadata.OrderByField]: Defines the available fields to order results in ListOperations.
+
+## Functions
+
+[`downgrade_schema(...)`][ml_metadata.downgrade_schema]: Downgrades the db specified in the connection config to a schema version.
+
+
From 941dfe086d75b63e874e41223cfbf2b9628bac90 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 31 Aug 2024 19:19:58 -0700
Subject: [PATCH 13/31] Use indentation instead of slash character `\`
---
ml_metadata/metadata_store/metadata_store.py | 6 +++---
ml_metadata/metadata_store/mlmd_types.py | 6 +++---
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/ml_metadata/metadata_store/metadata_store.py b/ml_metadata/metadata_store/metadata_store.py
index 9e0f7f7ea..1bdc50b07 100644
--- a/ml_metadata/metadata_store/metadata_store.py
+++ b/ml_metadata/metadata_store/metadata_store.py
@@ -1046,9 +1046,9 @@ def get_artifacts_and_types_by_artifact_ids(
extra_options: ExtraOptions instance.
Returns:
- Artifacts with matching ids and ArtifactTypes which can be matched by\
- type_ids from Artifacts. Each ArtifactType contains id, name,\
- properties and custom_properties fields.
+ Artifacts with matching ids and ArtifactTypes which can be matched by
+ type_ids from Artifacts. Each ArtifactType contains id, name,
+ properties and custom_properties fields.
"""
del extra_options
request = metadata_store_service_pb2.GetArtifactsByIDRequest(
diff --git a/ml_metadata/metadata_store/mlmd_types.py b/ml_metadata/metadata_store/mlmd_types.py
index a9da4320f..93535762a 100644
--- a/ml_metadata/metadata_store/mlmd_types.py
+++ b/ml_metadata/metadata_store/mlmd_types.py
@@ -68,9 +68,9 @@ def __init__(self, type_name: str):
type_name: name of the desired system type.
Raises:
- NOT_FOUND: if 'type_name' is not found in the pre-loaded simple type list;\
- It also raises the corresponding error from wrapped LoadSimpleTypes util\
- method.
+ NOT_FOUND: if 'type_name' is not found in the pre-loaded simple type list;
+ It also raises the corresponding error from wrapped LoadSimpleTypes util
+ method.
"""
[types_str, error_message,
status_code] = metadata_store_serialized.LoadSimpleTypes()
From a5100166dfb603f440686d23aaabd8cfd4404d50 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 31 Aug 2024 21:08:38 -0700
Subject: [PATCH 14/31] Add overview to `mlmd.errors` API docs
---
g3doc/api/mlmd.errors/root.md | 45 +++++++++++++++++++++++++++++++++++
1 file changed, 45 insertions(+)
diff --git a/g3doc/api/mlmd.errors/root.md b/g3doc/api/mlmd.errors/root.md
index e69de29bb..04562c7fa 100644
--- a/g3doc/api/mlmd.errors/root.md
+++ b/g3doc/api/mlmd.errors/root.md
@@ -0,0 +1,45 @@
+# mlmd.errors
+
+Exception types for MLMD errors.
+
+## Classes
+
+[`class AbortedError`][ml_metadata.errors.AbortedError]: The operation was aborted, typically due to a concurrent action.
+
+[`class AlreadyExistsError`][ml_metadata.errors.AlreadyExistsError]: Raised when an entity that we attempted to create already exists.
+
+[`class CancelledError`][ml_metadata.errors.CancelledError]: Raised when an operation or step is cancelled.
+
+[`class DataLossError`][ml_metadata.errors.DataLossError]: Raised when unrecoverable data loss or corruption is encountered.
+
+[`class DeadlineExceededError`][ml_metadata.errors.DeadlineExceededError]: Raised when a deadline expires before an operation could complete.
+
+[`class FailedPreconditionError`][ml_metadata.errors.FailedPreconditionError]: Raised when the system is not in a state to execute an operation.
+
+[`class InternalError`][ml_metadata.errors.InternalError]: Raised when the system experiences an internal error.
+
+[`class InvalidArgumentError`][ml_metadata.errors.InvalidArgumentError]: Raised when an operation receives an invalid argument.
+
+[`class NotFoundError`][ml_metadata.errors.NotFoundError]: Raised when a requested entity was not found.
+
+[`class OutOfRangeError`][ml_metadata.errors.OutOfRangeError]: Raised when an operation iterates past the valid input range.
+
+[`class PermissionDeniedError`][ml_metadata.errors.PermissionDeniedError]: Raised when the caller does not have permission to run an operation.
+
+[`class ResourceExhaustedError`][ml_metadata.errors.ResourceExhaustedError]: Some resource has been exhausted.
+
+[`class StatusError`][ml_metadata.errors.StatusError]: A general error class that cast maps Status to typed errors.
+
+[`class UnauthenticatedError`][ml_metadata.errors.UnauthenticatedError]: The request does not have valid authentication credentials.
+
+[`class UnavailableError`][ml_metadata.errors.UnavailableError]: Raised when the runtime is currently unavailable.
+
+[`class UnimplementedError`][ml_metadata.errors.UnimplementedError]: Raised when an operation has not been implemented.
+
+[`class UnknownError`][ml_metadata.errors.UnknownError]: Raised when an operation failed reason is unknown.
+
+## Functions
+
+[`exception_type_from_error_code(...)`][ml_metadata.errors.exception_type_from_error_code]: Returns error class w.r.t. the error_code.
+
+[`make_exception(...)`][ml_metadata.errors.make_exception]: Makes an exception with the MLMD error code.
From 2649a31f351f476d95205235e8d4b253dc423b11 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 31 Aug 2024 21:10:29 -0700
Subject: [PATCH 15/31] Add `show_if_no_docstring` option to mlmd.errors
---
g3doc/api/mlmd.errors/mlmd.errors.md | 2 ++
1 file changed, 2 insertions(+)
diff --git a/g3doc/api/mlmd.errors/mlmd.errors.md b/g3doc/api/mlmd.errors/mlmd.errors.md
index 87c3dc594..3c1dada6c 100644
--- a/g3doc/api/mlmd.errors/mlmd.errors.md
+++ b/g3doc/api/mlmd.errors/mlmd.errors.md
@@ -1,3 +1,5 @@
# mlmd.errors
::: ml_metadata.errors
+ options:
+ show_if_no_docstring: true
From f5b18b827927242eb30788d9f9e44161a1d9fb92 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 31 Aug 2024 21:12:51 -0700
Subject: [PATCH 16/31] Use spaces instead of tabs
---
g3doc/api/mlmd.proto/mlmd.proto.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/g3doc/api/mlmd.proto/mlmd.proto.md b/g3doc/api/mlmd.proto/mlmd.proto.md
index 1af4e40d8..e190a876c 100644
--- a/g3doc/api/mlmd.proto/mlmd.proto.md
+++ b/g3doc/api/mlmd.proto/mlmd.proto.md
@@ -1,5 +1,5 @@
# mlmd.proto
::: ml_metadata.proto
- options:
- show_if_no_docstring: true
+ options:
+ show_if_no_docstring: true
From 4802bb27002d6433e0b22f975ede25099d95c018 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 31 Aug 2024 21:14:33 -0700
Subject: [PATCH 17/31] Add overview with links.
The links do not work yet because the API reference for `mlmd.proto` is not being rendered; they will resolve once that is fixed.
---
g3doc/api/mlmd.proto/root.md | 35 +++++++++++++++++++++++++++++++++++
1 file changed, 35 insertions(+)
diff --git a/g3doc/api/mlmd.proto/root.md b/g3doc/api/mlmd.proto/root.md
index e69de29bb..a7cf83f20 100644
--- a/g3doc/api/mlmd.proto/root.md
+++ b/g3doc/api/mlmd.proto/root.md
@@ -0,0 +1,35 @@
+# mlmd.proto
+
+ML Metadata proto module.
+
+## Classes
+
+[`class Artifact`][ml_metadata.proto.Artifact]: An artifact represents an input or an output of individual steps in a ML workflow, e.g., a trained model, an input dataset, and evaluation metrics.
+
+[`class ArtifactType`][ml_metadata.proto.ArtifactType]: A user defined type about a collection of artifacts and their properties that are stored in the metadata store.
+
+[`class Association`][ml_metadata.proto.Association]: An association represents the relationship between executions and contexts.
+
+[`class Attribution`][ml_metadata.proto.Attribution]: An attribution represents the relationship between artifacts and contexts.
+
+[`class ConnectionConfig`][ml_metadata.proto.ConnectionConfig]: A connection configuration specifying the persistent backend to be used with MLMD.
+
+[`class Context`][ml_metadata.proto.Context]: A context defines a group of artifacts and/or executions.
+
+[`class ContextType`][ml_metadata.proto.ContextType]: A user defined type about a collection of contexts and their properties that are stored in the metadata store.
+
+[`class Event`][ml_metadata.proto.Event]: An event records the relationship between artifacts and executions.
+
+[`class Execution`][ml_metadata.proto.Execution]: An execution describes a component run or a step in an ML workflow along with its runtime parameters, e.g., a Trainer run, a data transformation step.
+
+[`class ExecutionType`][ml_metadata.proto.ExecutionType]: A user defined type about a collection of executions and their properties that are stored in the metadata store.
+
+[`class FakeDatabaseConfig`][ml_metadata.proto.FakeDatabaseConfig]: An in-memory database configuration for testing purpose.
+
+[`class MetadataStoreClientConfig`][ml_metadata.proto.MetadataStoreClientConfig]: A connection configuration to use a MLMD server as the persistent backend.
+
+[`class MySQLDatabaseConfig`][ml_metadata.proto.MySQLDatabaseConfig]: A connection configuration to use a MySQL db instance as a MLMD backend.
+
+[`class ParentContext`][ml_metadata.proto.ParentContext]: A parental context represents the relationship between contexts.
+
+[`class SqliteMetadataSourceConfig`][ml_metadata.proto.SqliteMetadataSourceConfig]: A connection configuration to use a Sqlite db file as a MLMD backend.
From 47960a4d8d86e6bccc8e31f8ddf4f3e2bc398d7c Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:35:45 -0700
Subject: [PATCH 18/31] Add link to tutorial
---
mkdocs.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mkdocs.yml b/mkdocs.yml
index 6da433c02..727e52c7b 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -108,7 +108,7 @@ watch:
- ml_metadata
nav:
- Getting Started: get_started.md
-
+ - Tutorial: https://tensorflow.github.io/tfx/tutorials/mlmd/mlmd_tutorial/
- API:
- mlmd:
- Overview: api/mlmd/root.md
From ead6757c2760d632340a4ec7d17ad9cb0338cec4 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:39:17 -0700
Subject: [PATCH 19/31] Fix repo name
---
mkdocs.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mkdocs.yml b/mkdocs.yml
index 727e52c7b..2cf697934 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,5 +1,5 @@
site_name: "ML Metadata"
-repo_name: "Tensorflow ML Metadata"
+repo_name: "ML Metadata"
repo_url: https://github.com/google/ml-metadata
docs_dir: g3doc
From 18875d81c6f6e08be3508769b41576049c021ca7 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 15:56:26 -0700
Subject: [PATCH 20/31] Rename `root.md` to `index.md`
---
g3doc/api/mlmd.errors/{root.md => index.md} | 0
g3doc/api/mlmd.proto/{root.md => index.md} | 0
g3doc/api/mlmd/{root.md => index.md} | 0
g3doc/get_started.md | 568 --------------------
g3doc/index.md | 568 ++++++++++++++++++++
mkdocs.yml | 8 +-
6 files changed, 572 insertions(+), 572 deletions(-)
rename g3doc/api/mlmd.errors/{root.md => index.md} (100%)
rename g3doc/api/mlmd.proto/{root.md => index.md} (100%)
rename g3doc/api/mlmd/{root.md => index.md} (100%)
delete mode 100644 g3doc/get_started.md
diff --git a/g3doc/api/mlmd.errors/root.md b/g3doc/api/mlmd.errors/index.md
similarity index 100%
rename from g3doc/api/mlmd.errors/root.md
rename to g3doc/api/mlmd.errors/index.md
diff --git a/g3doc/api/mlmd.proto/root.md b/g3doc/api/mlmd.proto/index.md
similarity index 100%
rename from g3doc/api/mlmd.proto/root.md
rename to g3doc/api/mlmd.proto/index.md
diff --git a/g3doc/api/mlmd/root.md b/g3doc/api/mlmd/index.md
similarity index 100%
rename from g3doc/api/mlmd/root.md
rename to g3doc/api/mlmd/index.md
diff --git a/g3doc/get_started.md b/g3doc/get_started.md
deleted file mode 100644
index f9908a2f0..000000000
--- a/g3doc/get_started.md
+++ /dev/null
@@ -1,568 +0,0 @@
-# ML Metadata
-
-[ML Metadata (MLMD)](https://github.com/google/ml-metadata) is a library for
-recording and retrieving metadata associated with ML developer and data
-scientist workflows. MLMD is an integral part of
-[TensorFlow Extended (TFX)](https://www.tensorflow.org/tfx), but is designed so
-that it can be used independently.
-
-Every run of a production ML pipeline generates metadata containing information
-about the various pipeline components, their executions (e.g. training runs),
-and resulting artifacts (e.g. trained models). In the event of unexpected
-pipeline behavior or errors, this metadata can be leveraged to analyze the
-lineage of pipeline components and debug issues. Think of this metadata as the
-equivalent of logging in software development.
-
-MLMD helps you understand and analyze all the interconnected parts of your ML
-pipeline instead of analyzing them in isolation and can help you answer
-questions about your ML pipeline such as:
-
-* Which dataset did the model train on?
-* What were the hyperparameters used to train the model?
-* Which pipeline run created the model?
-* Which training run led to this model?
-* Which version of TensorFlow created this model?
-* When was the failed model pushed?
-
-## Metadata store
-
-MLMD registers the following types of metadata in a database called the
-**Metadata Store**.
-
-1. Metadata about the artifacts generated through the components/steps of your
- ML pipelines
-1. Metadata about the executions of these components/steps
-1. Metadata about pipelines and associated lineage information
-
-The Metadata Store provides APIs to record and retrieve metadata to and from the
-storage backend. The storage backend is pluggable and can be extended. MLMD
-provides reference implementations for SQLite (which supports in-memory and
-disk) and MySQL out of the box.
-
-This graphic shows a high-level overview of the various components that are part
-of MLMD.
-
-![ML Metadata Overview](images/mlmd_overview.png)
-
-### Metadata storage backends and store connection configuration
-
-The `MetadataStore` object receives a connection configuration that corresponds
-to the storage backend used.
-
-* **Fake Database** provides an in-memory DB (using SQLite) for fast
- experimentation and local runs. The database is deleted when the store
- object is destroyed.
-
-```python
-from ml_metadata import metadata_store
-from ml_metadata.proto import metadata_store_pb2
-
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.fake_database.SetInParent() # Sets an empty fake database proto.
-store = metadata_store.MetadataStore(connection_config)
-```
-
-* **SQLite** reads and writes files from disk.
-
-```python
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.sqlite.filename_uri = '...'
-connection_config.sqlite.connection_mode = 3 # READWRITE_OPENCREATE
-store = metadata_store.MetadataStore(connection_config)
-```
-
-* **MySQL** connects to a MySQL server.
-
-```python
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.mysql.host = '...'
-connection_config.mysql.port = '...'
-connection_config.mysql.database = '...'
-connection_config.mysql.user = '...'
-connection_config.mysql.password = '...'
-store = metadata_store.MetadataStore(connection_config)
-```
-
-Similarly, when using a MySQL instance with Google CloudSQL
-([quickstart](https://cloud.google.com/sql/docs/mysql/quickstart),
-[connect-overview](https://cloud.google.com/sql/docs/mysql/connect-overview)),
-one could also use SSL option if applicable.
-
-```python
-connection_config.mysql.ssl_options.key = '...'
-connection_config.mysql.ssl_options.cert = '...'
-connection_config.mysql.ssl_options.ca = '...'
-connection_config.mysql.ssl_options.capath = '...'
-connection_config.mysql.ssl_options.cipher = '...'
-connection_config.mysql.ssl_options.verify_server_cert = '...'
-store = metadata_store.MetadataStore(connection_config)
-```
-
-* **PostgreSQL** connects to a PostgreSQL server.
-
-```python
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.postgresql.host = '...'
-connection_config.postgresql.port = '...'
-connection_config.postgresql.user = '...'
-connection_config.postgresql.password = '...'
-connection_config.postgresql.dbname = '...'
-store = metadata_store.MetadataStore(connection_config)
-```
-
-Similarly, when using a PostgreSQL instance with Google CloudSQL
-([quickstart](https://cloud.google.com/sql/docs/postgres/quickstart),
-[connect-overview](https://cloud.google.com/sql/docs/postgres/connect-overview)),
-one could also use SSL option if applicable.
-
-```python
-connection_config.postgresql.ssloption.sslmode = '...' # disable, allow, verify-ca, verify-full, etc.
-connection_config.postgresql.ssloption.sslcert = '...'
-connection_config.postgresql.ssloption.sslkey = '...'
-connection_config.postgresql.ssloption.sslpassword = '...'
-connection_config.postgresql.ssloption.sslrootcert = '...'
-store = metadata_store.MetadataStore(connection_config)
-```
-
-## Data model
-
-The Metadata Store uses the following data model to record and retrieve metadata
-from the storage backend.
-
-* `ArtifactType` describes an artifact's type and its properties that are
- stored in the metadata store. You can register these types on-the-fly with
- the metadata store in code, or you can load them in the store from a
- serialized format. Once you register a type, its definition is available
- throughout the lifetime of the store.
-* An `Artifact` describes a specific instance of an `ArtifactType`, and its
- properties that are written to the metadata store.
-* An `ExecutionType` describes a type of component or step in a workflow, and
- its runtime parameters.
-* An `Execution` is a record of a component run or a step in an ML workflow
- and the runtime parameters. An execution can be thought of as an instance of
- an `ExecutionType`. Executions are recorded when you run an ML pipeline or
- step.
-* An `Event` is a record of the relationship between artifacts and executions.
- When an execution happens, events record every artifact that was used by the
- execution, and every artifact that was produced. These records allow for
- lineage tracking throughout a workflow. By looking at all events, MLMD knows
- what executions happened and what artifacts were created as a result. MLMD
- can then recurse back from any artifact to all of its upstream inputs.
-* A `ContextType` describes a type of conceptual group of artifacts and
- executions in a workflow, and its structural properties. For example:
- projects, pipeline runs, experiments, owners etc.
-* A `Context` is an instance of a `ContextType`. It captures the shared
- information within the group. For example: project name, changelist commit
- id, experiment annotations etc. It has a user-defined unique name within its
- `ContextType`.
-* An `Attribution` is a record of the relationship between artifacts and
- contexts.
-* An `Association` is a record of the relationship between executions and
- contexts.
-
-## MLMD Functionality
-
-Tracking the inputs and outputs of all components/steps in an ML workflow and
-their lineage allows ML platforms to enable several important features. The
-following list provides a non-exhaustive overview of some of the major benefits.
-
-* **List all Artifacts of a specific type.** Example: all Models that have
- been trained.
-* **Load two Artifacts of the same type for comparison.** Example: compare
- results from two experiments.
-* **Show a DAG of all related executions and their input and output artifacts
- of a context.** Example: visualize the workflow of an experiment for
- debugging and discovery.
-* **Recurse back through all events to see how an artifact was created.**
- Examples: see what data went into a model; enforce data retention plans.
-* **Identify all artifacts that were created using a given artifact.**
- Examples: see all Models trained from a specific dataset; mark models based
- upon bad data.
-* **Determine if an execution has been run on the same inputs before.**
- Example: determine whether a component/step has already completed the same
- work and the previous output can just be reused.
-* **Record and query context of workflow runs.** Examples: track the owner and
- changelist used for a workflow run; group the lineage by experiments; manage
- artifacts by projects.
-* **Declarative nodes filtering capabilities on properties and 1-hop
- neighborhood nodes.** Examples: look for artifacts of a type and under some
- pipeline context; return typed artifacts where a given property’s value is
- within a range; find previous executions in a context with the same inputs.
-
-See the
-[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) for
-an example that shows you how to use the MLMD API and the metadata store to
-retrieve lineage information.
-
-### Integrate ML Metadata into your ML Workflows
-
-If you are a platform developer interested in integrating MLMD into your system,
-use the example workflow below to use the low-level MLMD APIs to track the
-execution of a training task. You can also use higher-level Python APIs in
-notebook environments to record experiment metadata.
-
-![ML Metadata Example Flow](images/mlmd_flow.png)
-
-1) Register artifact types
-
-```python
-# Create ArtifactTypes, e.g., Data and Model
-data_type = metadata_store_pb2.ArtifactType()
-data_type.name = "DataSet"
-data_type.properties["day"] = metadata_store_pb2.INT
-data_type.properties["split"] = metadata_store_pb2.STRING
-data_type_id = store.put_artifact_type(data_type)
-
-model_type = metadata_store_pb2.ArtifactType()
-model_type.name = "SavedModel"
-model_type.properties["version"] = metadata_store_pb2.INT
-model_type.properties["name"] = metadata_store_pb2.STRING
-model_type_id = store.put_artifact_type(model_type)
-
-# Query all registered Artifact types.
-artifact_types = store.get_artifact_types()
-```
-
-2) Register execution types for all steps in the ML workflow
-
-```python
-# Create an ExecutionType, e.g., Trainer
-trainer_type = metadata_store_pb2.ExecutionType()
-trainer_type.name = "Trainer"
-trainer_type.properties["state"] = metadata_store_pb2.STRING
-trainer_type_id = store.put_execution_type(trainer_type)
-
-# Query a registered Execution type with the returned id
-[registered_type] = store.get_execution_types_by_id([trainer_type_id])
-```
-
-3) Create an artifact of DataSet ArtifactType
-
-```python
-# Create an input artifact of type DataSet
-data_artifact = metadata_store_pb2.Artifact()
-data_artifact.uri = 'path/to/data'
-data_artifact.properties["day"].int_value = 1
-data_artifact.properties["split"].string_value = 'train'
-data_artifact.type_id = data_type_id
-[data_artifact_id] = store.put_artifacts([data_artifact])
-
-# Query all registered Artifacts
-artifacts = store.get_artifacts()
-
-# Plus, there are many ways to query the same Artifact
-[stored_data_artifact] = store.get_artifacts_by_id([data_artifact_id])
-artifacts_with_uri = store.get_artifacts_by_uri(data_artifact.uri)
-artifacts_with_conditions = store.get_artifacts(
- list_options=mlmd.ListOptions(
- filter_query='uri LIKE "%/data" AND properties.day.int_value > 0'))
-```
-
-4) Create an execution of the Trainer run
-
-```python
-# Register the Execution of a Trainer run
-trainer_run = metadata_store_pb2.Execution()
-trainer_run.type_id = trainer_type_id
-trainer_run.properties["state"].string_value = "RUNNING"
-[run_id] = store.put_executions([trainer_run])
-
-# Query all registered Execution
-executions = store.get_executions_by_id([run_id])
-# Similarly, the same execution can be queried with conditions.
-executions_with_conditions = store.get_executions(
- list_options = mlmd.ListOptions(
- filter_query='type = "Trainer" AND properties.state.string_value IS NOT NULL'))
-```
-
-5) Define the input event and read data
-
-```python
-# Define the input event
-input_event = metadata_store_pb2.Event()
-input_event.artifact_id = data_artifact_id
-input_event.execution_id = run_id
-input_event.type = metadata_store_pb2.Event.DECLARED_INPUT
-
-# Record the input event in the metadata store
-store.put_events([input_event])
-```
-
-6) Declare the output artifact
-
-```python
-# Declare the output artifact of type SavedModel
-model_artifact = metadata_store_pb2.Artifact()
-model_artifact.uri = 'path/to/model/file'
-model_artifact.properties["version"].int_value = 1
-model_artifact.properties["name"].string_value = 'MNIST-v1'
-model_artifact.type_id = model_type_id
-[model_artifact_id] = store.put_artifacts([model_artifact])
-```
-
-7) Record the output event
-
-```python
-# Declare the output event
-output_event = metadata_store_pb2.Event()
-output_event.artifact_id = model_artifact_id
-output_event.execution_id = run_id
-output_event.type = metadata_store_pb2.Event.DECLARED_OUTPUT
-
-# Submit output event to the Metadata Store
-store.put_events([output_event])
-```
-
-8) Mark the execution as completed
-
-```python
-trainer_run.id = run_id
-trainer_run.properties["state"].string_value = "COMPLETED"
-store.put_executions([trainer_run])
-```
-
-9) Group artifacts and executions under a context using attributions and
-assertions artifacts
-
-```python
-# Create a ContextType, e.g., Experiment with a note property
-experiment_type = metadata_store_pb2.ContextType()
-experiment_type.name = "Experiment"
-experiment_type.properties["note"] = metadata_store_pb2.STRING
-experiment_type_id = store.put_context_type(experiment_type)
-
-# Group the model and the trainer run to an experiment.
-my_experiment = metadata_store_pb2.Context()
-my_experiment.type_id = experiment_type_id
-# Give the experiment a name
-my_experiment.name = "exp1"
-my_experiment.properties["note"].string_value = "My first experiment."
-[experiment_id] = store.put_contexts([my_experiment])
-
-attribution = metadata_store_pb2.Attribution()
-attribution.artifact_id = model_artifact_id
-attribution.context_id = experiment_id
-
-association = metadata_store_pb2.Association()
-association.execution_id = run_id
-association.context_id = experiment_id
-
-store.put_attributions_and_associations([attribution], [association])
-
-# Query the Artifacts and Executions that are linked to the Context.
-experiment_artifacts = store.get_artifacts_by_context(experiment_id)
-experiment_executions = store.get_executions_by_context(experiment_id)
-
-# You can also use neighborhood queries to fetch these artifacts and executions
-# with conditions.
-experiment_artifacts_with_conditions = store.get_artifacts(
- list_options = mlmd.ListOptions(
- filter_query=('contexts_a.type = "Experiment" AND contexts_a.name = "exp1"')))
-experiment_executions_with_conditions = store.get_executions(
- list_options = mlmd.ListOptions(
- filter_query=('contexts_a.id = {}'.format(experiment_id))))
-```
-
-## Use MLMD with a remote gRPC server
-
-You can use MLMD with remote gRPC servers as shown below:
-
-* Start a server
-
-```bash
-bazel run -c opt --define grpc_no_ares=true //ml_metadata/metadata_store:metadata_store_server
-```
-
-By default, the server uses a fake in-memory db per request and does not persist
-the metadata across calls. It can also be configured with a MLMD
-`MetadataStoreServerConfig` to use SQLite files or MySQL instances. The config
-can be stored in a text protobuf file and passed to the binary with
-`--metadata_store_server_config_file=path_to_the_config_file`.
-
-An example `MetadataStoreServerConfig` file in text protobuf format:
-
-```textpb
-connection_config {
- sqlite {
- filename_uri: '/tmp/test_db'
- connection_mode: READWRITE_OPENCREATE
- }
-}
-```
-
-* Create the client stub and use it in Python
-
-```python
-from grpc import insecure_channel
-from ml_metadata.proto import metadata_store_pb2
-from ml_metadata.proto import metadata_store_service_pb2
-from ml_metadata.proto import metadata_store_service_pb2_grpc
-
-channel = insecure_channel('localhost:8080')
-stub = metadata_store_service_pb2_grpc.MetadataStoreServiceStub(channel)
-```
-
-* Use MLMD with RPC calls
-
-```python
-# Create ArtifactTypes, e.g., Data and Model
-data_type = metadata_store_pb2.ArtifactType()
-data_type.name = "DataSet"
-data_type.properties["day"] = metadata_store_pb2.INT
-data_type.properties["split"] = metadata_store_pb2.STRING
-
-request = metadata_store_service_pb2.PutArtifactTypeRequest()
-request.all_fields_match = True
-request.artifact_type.CopyFrom(data_type)
-stub.PutArtifactType(request)
-
-model_type = metadata_store_pb2.ArtifactType()
-model_type.name = "SavedModel"
-model_type.properties["version"] = metadata_store_pb2.INT
-model_type.properties["name"] = metadata_store_pb2.STRING
-
-request.artifact_type.CopyFrom(model_type)
-stub.PutArtifactType(request)
-```
-
-## Upgrade the MLMD library
-
-When using a new MLMD release or your own build with an existing MLMD database,
-there may be changes to the database schema. Unless a breaking change is
-explicitly mentioned in the release note, all MLMD database schema changes are
-transparent for the MLMD API users. If there is a breaking change notice, then
-old databases can still be upgraded to use the new MLMD library.
-
-When the MLMD library connects to the database, it compares the expected schema
-version of the MLMD library (`library_version`) with the schema version
-(`db_version`) recorded in the given database. By default, MLMD will check the
-compatibility and raise errors when the versions are incompatible.
-
-* If `library_version` is compatible with `db_version`, nothing happens.
-* If `library_version` is newer than `db_version`, and auto-migration is not
- enabled, then MLMD raises a failed precondition error with the following
- message:
-
- ```
- MLMD database version $db_version is older than library version
- $library_version. Schema migration is disabled. Please upgrade the
- database then use the library version; or switch to a older library
- version to use the current database.
- ```
-
-* If `library_version` is older than `db_version`, by default MLMD library
- returns errors to prevent any data loss. In this case, you should upgrade
- the library version before using that database.
-
-### Upgrade the database schema
-
-MLMD provides utilities to upgrade the database version.
-
-For example, when connecting to a backend with a Python library:
-
-```python {highlight="range:enable,True"}
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.sqlite.filename_uri = '...'
-store = metadata_store.MetadataStore(connection_config,
- enable_upgrade_migration=True)
-```
-
-Or when using gRPC server, set the MetadataStoreServerConfig as follows:
-
-```python
-connection_config {
- ...
-}
-migration_options {
- enable_upgrade_migration: true
-}
-```
-
-MLMD then evolves the database by executing a series of migration scripts. If
-the backend supports DDL queries within a transaction (e.g., SQLite), MLMD runs
-the steps together within a single transaction, and the transaction is
-rolled-back when an error occurs. The migration script is provided together with
-any schema-change commit and verified through testing.
-
-Note: The migration DDLs in MySQL are not transactional. When using MySQL, there
-should only be a single connection with the upgrade migration enabled to use the
-old database. Take a backup of the database before upgrading to prevent
-potential data losses.
-
-### Downgrade the database schema
-
-A misconfiguration in the deployment of MLMD may cause an accidental upgrade,
-e.g., when you tries out a new version of the library and accidentally connect
-to the production instance of MLMD and upgrade the database. To recover from
-these situations, MLMD provides a downgrade feature. During connection, if the
-migration options specify the `downgrade_to_schema_version`, MLMD will run a
-downgrade transaction to revert the schema version and migrate the data, then
-terminate the connection. Once the downgrade is done, use the older version of
-the library to connect to the database.
-
-For example:
-
-```python
-connection_config = metadata_store_pb2.ConnectionConfig()
-connection_config.sqlite.filename_uri = '...'
-metadata_store.downgrade_schema(connection_config,
- downgrade_to_schema_version = 0)
-```
-
-Note: When downgrading, MLMD prevents data loss as much as possible. However,
-newer schema versions might be inherently more expressive and hence a downgrade
-can introduce data loss. When using backends that do not support DDL
-transactions (e.g., MySQL), the database should be backed up before downgrading
-and the downgrade script should be the only MLMD connection to the database.
-
-The list of `schema_version` used in MLMD releases are:
-
-ml-metadata (MLMD) | schema_version
------------------- | --------------
-1.16.0 | 10
-1.15.0 | 10
-1.14.0 | 10
-1.13.1 | 10
-1.13.0 | 10
-1.12.0 | 10
-1.11.0 | 10
-1.10.0 | 8
-1.9.0 | 8
-1.8.0 | 8
-1.7.0 | 8
-1.6.0 | 7
-1.5.0 | 7
-1.4.0 | 7
-1.3.0 | 7
-1.2.0 | 7
-1.1.0 | 7
-1.0.0 | 6
-0.30.0 | 6
-0.29.0 | 6
-0.28.0 | 6
-0.27.0 | 6
-0.26.0 | 6
-0.25.1 | 6
-0.24.0 | 5
-0.23.0 | 5
-0.22.1 | 5
-0.21.2 | 4
-0.15.2 | 4
-0.14.0 | 4
-0.13.2 | 0
-
-## Resources
-
-The MLMD library has a high-level API that you can readily use with your ML
-pipelines. See the
-[MLMD API documentation](https://www.tensorflow.org/tfx/ml_metadata/api_docs/python/mlmd)
-for more details.
-
-Check out
-[MLMD Declarative Nodes Filtering](https://github.com/google/ml-metadata/blob/v1.2.0/ml_metadata/proto/metadata_store.proto#L708-L786)
-to learn how to use MLMD declarative nodes filtering capabilities on properties
-and 1-hop neighborhood nodes.
-
-Also check out the
-[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) to
-learn how to use MLMD to trace the lineage of your pipeline components.
diff --git a/g3doc/index.md b/g3doc/index.md
index e69de29bb..f9908a2f0 100644
--- a/g3doc/index.md
+++ b/g3doc/index.md
@@ -0,0 +1,568 @@
+# ML Metadata
+
+[ML Metadata (MLMD)](https://github.com/google/ml-metadata) is a library for
+recording and retrieving metadata associated with ML developer and data
+scientist workflows. MLMD is an integral part of
+[TensorFlow Extended (TFX)](https://www.tensorflow.org/tfx), but is designed so
+that it can be used independently.
+
+Every run of a production ML pipeline generates metadata containing information
+about the various pipeline components, their executions (e.g. training runs),
+and resulting artifacts (e.g. trained models). In the event of unexpected
+pipeline behavior or errors, this metadata can be leveraged to analyze the
+lineage of pipeline components and debug issues. Think of this metadata as the
+equivalent of logging in software development.
+
+MLMD helps you understand and analyze all the interconnected parts of your ML
+pipeline instead of analyzing them in isolation and can help you answer
+questions about your ML pipeline such as:
+
+* Which dataset did the model train on?
+* What were the hyperparameters used to train the model?
+* Which pipeline run created the model?
+* Which training run led to this model?
+* Which version of TensorFlow created this model?
+* When was the failed model pushed?
+
+## Metadata store
+
+MLMD registers the following types of metadata in a database called the
+**Metadata Store**.
+
+1. Metadata about the artifacts generated through the components/steps of your
+ ML pipelines
+1. Metadata about the executions of these components/steps
+1. Metadata about pipelines and associated lineage information
+
+The Metadata Store provides APIs to record and retrieve metadata to and from the
+storage backend. The storage backend is pluggable and can be extended. MLMD
+provides reference implementations for SQLite (which supports in-memory and
+disk) and MySQL out of the box.
+
+This graphic shows a high-level overview of the various components that are part
+of MLMD.
+
+![ML Metadata Overview](images/mlmd_overview.png)
+
+### Metadata storage backends and store connection configuration
+
+The `MetadataStore` object receives a connection configuration that corresponds
+to the storage backend used.
+
+* **Fake Database** provides an in-memory DB (using SQLite) for fast
+ experimentation and local runs. The database is deleted when the store
+ object is destroyed.
+
+```python
+from ml_metadata import metadata_store
+from ml_metadata.proto import metadata_store_pb2
+
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.fake_database.SetInParent() # Sets an empty fake database proto.
+store = metadata_store.MetadataStore(connection_config)
+```
+
+* **SQLite** reads and writes files from disk.
+
+```python
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.sqlite.filename_uri = '...'
+connection_config.sqlite.connection_mode = 3 # READWRITE_OPENCREATE
+store = metadata_store.MetadataStore(connection_config)
+```
+
+* **MySQL** connects to a MySQL server.
+
+```python
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.mysql.host = '...'
+connection_config.mysql.port = '...'
+connection_config.mysql.database = '...'
+connection_config.mysql.user = '...'
+connection_config.mysql.password = '...'
+store = metadata_store.MetadataStore(connection_config)
+```
+
+Similarly, when using a MySQL instance with Google CloudSQL
+([quickstart](https://cloud.google.com/sql/docs/mysql/quickstart),
+[connect-overview](https://cloud.google.com/sql/docs/mysql/connect-overview)),
+one could also use SSL options if applicable.
+
+```python
+connection_config.mysql.ssl_options.key = '...'
+connection_config.mysql.ssl_options.cert = '...'
+connection_config.mysql.ssl_options.ca = '...'
+connection_config.mysql.ssl_options.capath = '...'
+connection_config.mysql.ssl_options.cipher = '...'
+connection_config.mysql.ssl_options.verify_server_cert = '...'
+store = metadata_store.MetadataStore(connection_config)
+```
+
+* **PostgreSQL** connects to a PostgreSQL server.
+
+```python
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.postgresql.host = '...'
+connection_config.postgresql.port = '...'
+connection_config.postgresql.user = '...'
+connection_config.postgresql.password = '...'
+connection_config.postgresql.dbname = '...'
+store = metadata_store.MetadataStore(connection_config)
+```
+
+Similarly, when using a PostgreSQL instance with Google CloudSQL
+([quickstart](https://cloud.google.com/sql/docs/postgres/quickstart),
+[connect-overview](https://cloud.google.com/sql/docs/postgres/connect-overview)),
+one could also use SSL options if applicable.
+
+```python
+connection_config.postgresql.ssloption.sslmode = '...' # disable, allow, verify-ca, verify-full, etc.
+connection_config.postgresql.ssloption.sslcert = '...'
+connection_config.postgresql.ssloption.sslkey = '...'
+connection_config.postgresql.ssloption.sslpassword = '...'
+connection_config.postgresql.ssloption.sslrootcert = '...'
+store = metadata_store.MetadataStore(connection_config)
+```
+
+## Data model
+
+The Metadata Store uses the following data model to record and retrieve metadata
+from the storage backend.
+
+* `ArtifactType` describes an artifact's type and its properties that are
+ stored in the metadata store. You can register these types on-the-fly with
+ the metadata store in code, or you can load them in the store from a
+ serialized format. Once you register a type, its definition is available
+ throughout the lifetime of the store.
+* An `Artifact` describes a specific instance of an `ArtifactType`, and its
+ properties that are written to the metadata store.
+* An `ExecutionType` describes a type of component or step in a workflow, and
+ its runtime parameters.
+* An `Execution` is a record of a component run or a step in an ML workflow
+ and the runtime parameters. An execution can be thought of as an instance of
+ an `ExecutionType`. Executions are recorded when you run an ML pipeline or
+ step.
+* An `Event` is a record of the relationship between artifacts and executions.
+ When an execution happens, events record every artifact that was used by the
+ execution, and every artifact that was produced. These records allow for
+ lineage tracking throughout a workflow. By looking at all events, MLMD knows
+ what executions happened and what artifacts were created as a result. MLMD
+ can then recurse back from any artifact to all of its upstream inputs.
+* A `ContextType` describes a type of conceptual group of artifacts and
+ executions in a workflow, and its structural properties. For example:
+ projects, pipeline runs, experiments, owners etc.
+* A `Context` is an instance of a `ContextType`. It captures the shared
+ information within the group. For example: project name, changelist commit
+ id, experiment annotations etc. It has a user-defined unique name within its
+ `ContextType`.
+* An `Attribution` is a record of the relationship between artifacts and
+ contexts.
+* An `Association` is a record of the relationship between executions and
+ contexts.
+
+## MLMD Functionality
+
+Tracking the inputs and outputs of all components/steps in an ML workflow and
+their lineage allows ML platforms to enable several important features. The
+following list provides a non-exhaustive overview of some of the major benefits.
+
+* **List all Artifacts of a specific type.** Example: all Models that have
+ been trained.
+* **Load two Artifacts of the same type for comparison.** Example: compare
+ results from two experiments.
+* **Show a DAG of all related executions and their input and output artifacts
+ of a context.** Example: visualize the workflow of an experiment for
+ debugging and discovery.
+* **Recurse back through all events to see how an artifact was created.**
+ Examples: see what data went into a model; enforce data retention plans.
+* **Identify all artifacts that were created using a given artifact.**
+ Examples: see all Models trained from a specific dataset; mark models based
+ upon bad data.
+* **Determine if an execution has been run on the same inputs before.**
+ Example: determine whether a component/step has already completed the same
+ work and the previous output can just be reused.
+* **Record and query context of workflow runs.** Examples: track the owner and
+ changelist used for a workflow run; group the lineage by experiments; manage
+ artifacts by projects.
+* **Declarative nodes filtering capabilities on properties and 1-hop
+ neighborhood nodes.** Examples: look for artifacts of a type and under some
+ pipeline context; return typed artifacts where a given property’s value is
+ within a range; find previous executions in a context with the same inputs.
+
+See the
+[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) for
+an example that shows you how to use the MLMD API and the metadata store to
+retrieve lineage information.
+
+### Integrate ML Metadata into your ML Workflows
+
+If you are a platform developer interested in integrating MLMD into your system,
+use the example workflow below to use the low-level MLMD APIs to track the
+execution of a training task. You can also use higher-level Python APIs in
+notebook environments to record experiment metadata.
+
+![ML Metadata Example Flow](images/mlmd_flow.png)
+
+1) Register artifact types
+
+```python
+# Create ArtifactTypes, e.g., Data and Model
+data_type = metadata_store_pb2.ArtifactType()
+data_type.name = "DataSet"
+data_type.properties["day"] = metadata_store_pb2.INT
+data_type.properties["split"] = metadata_store_pb2.STRING
+data_type_id = store.put_artifact_type(data_type)
+
+model_type = metadata_store_pb2.ArtifactType()
+model_type.name = "SavedModel"
+model_type.properties["version"] = metadata_store_pb2.INT
+model_type.properties["name"] = metadata_store_pb2.STRING
+model_type_id = store.put_artifact_type(model_type)
+
+# Query all registered Artifact types.
+artifact_types = store.get_artifact_types()
+```
+
+2) Register execution types for all steps in the ML workflow
+
+```python
+# Create an ExecutionType, e.g., Trainer
+trainer_type = metadata_store_pb2.ExecutionType()
+trainer_type.name = "Trainer"
+trainer_type.properties["state"] = metadata_store_pb2.STRING
+trainer_type_id = store.put_execution_type(trainer_type)
+
+# Query a registered Execution type with the returned id
+[registered_type] = store.get_execution_types_by_id([trainer_type_id])
+```
+
+3) Create an artifact of DataSet ArtifactType
+
+```python
+# Create an input artifact of type DataSet
+data_artifact = metadata_store_pb2.Artifact()
+data_artifact.uri = 'path/to/data'
+data_artifact.properties["day"].int_value = 1
+data_artifact.properties["split"].string_value = 'train'
+data_artifact.type_id = data_type_id
+[data_artifact_id] = store.put_artifacts([data_artifact])
+
+# Query all registered Artifacts
+artifacts = store.get_artifacts()
+
+# Plus, there are many ways to query the same Artifact
+[stored_data_artifact] = store.get_artifacts_by_id([data_artifact_id])
+artifacts_with_uri = store.get_artifacts_by_uri(data_artifact.uri)
+artifacts_with_conditions = store.get_artifacts(
+ list_options=mlmd.ListOptions(
+ filter_query='uri LIKE "%/data" AND properties.day.int_value > 0'))
+```
+
+4) Create an execution of the Trainer run
+
+```python
+# Register the Execution of a Trainer run
+trainer_run = metadata_store_pb2.Execution()
+trainer_run.type_id = trainer_type_id
+trainer_run.properties["state"].string_value = "RUNNING"
+[run_id] = store.put_executions([trainer_run])
+
+# Query all registered Execution
+executions = store.get_executions_by_id([run_id])
+# Similarly, the same execution can be queried with conditions.
+executions_with_conditions = store.get_executions(
+ list_options = mlmd.ListOptions(
+ filter_query='type = "Trainer" AND properties.state.string_value IS NOT NULL'))
+```
+
+5) Define the input event and read data
+
+```python
+# Define the input event
+input_event = metadata_store_pb2.Event()
+input_event.artifact_id = data_artifact_id
+input_event.execution_id = run_id
+input_event.type = metadata_store_pb2.Event.DECLARED_INPUT
+
+# Record the input event in the metadata store
+store.put_events([input_event])
+```
+
+6) Declare the output artifact
+
+```python
+# Declare the output artifact of type SavedModel
+model_artifact = metadata_store_pb2.Artifact()
+model_artifact.uri = 'path/to/model/file'
+model_artifact.properties["version"].int_value = 1
+model_artifact.properties["name"].string_value = 'MNIST-v1'
+model_artifact.type_id = model_type_id
+[model_artifact_id] = store.put_artifacts([model_artifact])
+```
+
+7) Record the output event
+
+```python
+# Declare the output event
+output_event = metadata_store_pb2.Event()
+output_event.artifact_id = model_artifact_id
+output_event.execution_id = run_id
+output_event.type = metadata_store_pb2.Event.DECLARED_OUTPUT
+
+# Submit output event to the Metadata Store
+store.put_events([output_event])
+```
+
+8) Mark the execution as completed
+
+```python
+trainer_run.id = run_id
+trainer_run.properties["state"].string_value = "COMPLETED"
+store.put_executions([trainer_run])
+```
+
+9) Group artifacts and executions under a context using attributions and
+associations
+
+```python
+# Create a ContextType, e.g., Experiment with a note property
+experiment_type = metadata_store_pb2.ContextType()
+experiment_type.name = "Experiment"
+experiment_type.properties["note"] = metadata_store_pb2.STRING
+experiment_type_id = store.put_context_type(experiment_type)
+
+# Group the model and the trainer run to an experiment.
+my_experiment = metadata_store_pb2.Context()
+my_experiment.type_id = experiment_type_id
+# Give the experiment a name
+my_experiment.name = "exp1"
+my_experiment.properties["note"].string_value = "My first experiment."
+[experiment_id] = store.put_contexts([my_experiment])
+
+attribution = metadata_store_pb2.Attribution()
+attribution.artifact_id = model_artifact_id
+attribution.context_id = experiment_id
+
+association = metadata_store_pb2.Association()
+association.execution_id = run_id
+association.context_id = experiment_id
+
+store.put_attributions_and_associations([attribution], [association])
+
+# Query the Artifacts and Executions that are linked to the Context.
+experiment_artifacts = store.get_artifacts_by_context(experiment_id)
+experiment_executions = store.get_executions_by_context(experiment_id)
+
+# You can also use neighborhood queries to fetch these artifacts and executions
+# with conditions.
+experiment_artifacts_with_conditions = store.get_artifacts(
+ list_options = mlmd.ListOptions(
+ filter_query=('contexts_a.type = "Experiment" AND contexts_a.name = "exp1"')))
+experiment_executions_with_conditions = store.get_executions(
+ list_options = mlmd.ListOptions(
+ filter_query=('contexts_a.id = {}'.format(experiment_id))))
+```
+
+## Use MLMD with a remote gRPC server
+
+You can use MLMD with remote gRPC servers as shown below:
+
+* Start a server
+
+```bash
+bazel run -c opt --define grpc_no_ares=true //ml_metadata/metadata_store:metadata_store_server
+```
+
+By default, the server uses a fake in-memory db per request and does not persist
+the metadata across calls. It can also be configured with an MLMD
+`MetadataStoreServerConfig` to use SQLite files or MySQL instances. The config
+can be stored in a text protobuf file and passed to the binary with
+`--metadata_store_server_config_file=path_to_the_config_file`.
+
+An example `MetadataStoreServerConfig` file in text protobuf format:
+
+```textpb
+connection_config {
+ sqlite {
+ filename_uri: '/tmp/test_db'
+ connection_mode: READWRITE_OPENCREATE
+ }
+}
+```
+
+* Create the client stub and use it in Python
+
+```python
+from grpc import insecure_channel
+from ml_metadata.proto import metadata_store_pb2
+from ml_metadata.proto import metadata_store_service_pb2
+from ml_metadata.proto import metadata_store_service_pb2_grpc
+
+channel = insecure_channel('localhost:8080')
+stub = metadata_store_service_pb2_grpc.MetadataStoreServiceStub(channel)
+```
+
+* Use MLMD with RPC calls
+
+```python
+# Create ArtifactTypes, e.g., Data and Model
+data_type = metadata_store_pb2.ArtifactType()
+data_type.name = "DataSet"
+data_type.properties["day"] = metadata_store_pb2.INT
+data_type.properties["split"] = metadata_store_pb2.STRING
+
+request = metadata_store_service_pb2.PutArtifactTypeRequest()
+request.all_fields_match = True
+request.artifact_type.CopyFrom(data_type)
+stub.PutArtifactType(request)
+
+model_type = metadata_store_pb2.ArtifactType()
+model_type.name = "SavedModel"
+model_type.properties["version"] = metadata_store_pb2.INT
+model_type.properties["name"] = metadata_store_pb2.STRING
+
+request.artifact_type.CopyFrom(model_type)
+stub.PutArtifactType(request)
+```
+
+## Upgrade the MLMD library
+
+When using a new MLMD release or your own build with an existing MLMD database,
+there may be changes to the database schema. Unless a breaking change is
+explicitly mentioned in the release note, all MLMD database schema changes are
+transparent for the MLMD API users. If there is a breaking change notice, then
+old databases can still be upgraded to use the new MLMD library.
+
+When the MLMD library connects to the database, it compares the expected schema
+version of the MLMD library (`library_version`) with the schema version
+(`db_version`) recorded in the given database. By default, MLMD will check the
+compatibility and raise errors when the versions are incompatible.
+
+* If `library_version` is compatible with `db_version`, nothing happens.
+* If `library_version` is newer than `db_version`, and auto-migration is not
+ enabled, then MLMD raises a failed precondition error with the following
+ message:
+
+ ```
+ MLMD database version $db_version is older than library version
+ $library_version. Schema migration is disabled. Please upgrade the
+    database then use the library version; or switch to an older library
+ version to use the current database.
+ ```
+
+* If `library_version` is older than `db_version`, by default MLMD library
+ returns errors to prevent any data loss. In this case, you should upgrade
+ the library version before using that database.
+
+### Upgrade the database schema
+
+MLMD provides utilities to upgrade the database version.
+
+For example, when connecting to a backend with a Python library:
+
+```python {highlight="range:enable,True"}
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.sqlite.filename_uri = '...'
+store = metadata_store.MetadataStore(connection_config,
+ enable_upgrade_migration=True)
+```
+
+Or when using gRPC server, set the MetadataStoreServerConfig as follows:
+
+```python
+connection_config {
+ ...
+}
+migration_options {
+ enable_upgrade_migration: true
+}
+```
+
+MLMD then evolves the database by executing a series of migration scripts. If
+the backend supports DDL queries within a transaction (e.g., SQLite), MLMD runs
+the steps together within a single transaction, and the transaction is
+rolled-back when an error occurs. The migration script is provided together with
+any schema-change commit and verified through testing.
+
+Note: The migration DDLs in MySQL are not transactional. When using MySQL, there
+should only be a single connection with the upgrade migration enabled to use the
+old database. Take a backup of the database before upgrading to prevent
+potential data losses.
+
+### Downgrade the database schema
+
+A misconfiguration in the deployment of MLMD may cause an accidental upgrade,
+e.g., when you try out a new version of the library and accidentally connect
+to the production instance of MLMD and upgrade the database. To recover from
+these situations, MLMD provides a downgrade feature. During connection, if the
+migration options specify the `downgrade_to_schema_version`, MLMD will run a
+downgrade transaction to revert the schema version and migrate the data, then
+terminate the connection. Once the downgrade is done, use the older version of
+the library to connect to the database.
+
+For example:
+
+```python
+connection_config = metadata_store_pb2.ConnectionConfig()
+connection_config.sqlite.filename_uri = '...'
+metadata_store.downgrade_schema(connection_config,
+ downgrade_to_schema_version = 0)
+```
+
+Note: When downgrading, MLMD prevents data loss as much as possible. However,
+newer schema versions might be inherently more expressive and hence a downgrade
+can introduce data loss. When using backends that do not support DDL
+transactions (e.g., MySQL), the database should be backed up before downgrading
+and the downgrade script should be the only MLMD connection to the database.
+
+The list of `schema_version` values used in MLMD releases is:
+
+ml-metadata (MLMD) | schema_version
+------------------ | --------------
+1.16.0 | 10
+1.15.0 | 10
+1.14.0 | 10
+1.13.1 | 10
+1.13.0 | 10
+1.12.0 | 10
+1.11.0 | 10
+1.10.0 | 8
+1.9.0 | 8
+1.8.0 | 8
+1.7.0 | 8
+1.6.0 | 7
+1.5.0 | 7
+1.4.0 | 7
+1.3.0 | 7
+1.2.0 | 7
+1.1.0 | 7
+1.0.0 | 6
+0.30.0 | 6
+0.29.0 | 6
+0.28.0 | 6
+0.27.0 | 6
+0.26.0 | 6
+0.25.1 | 6
+0.24.0 | 5
+0.23.0 | 5
+0.22.1 | 5
+0.21.2 | 4
+0.15.2 | 4
+0.14.0 | 4
+0.13.2 | 0
+
+## Resources
+
+The MLMD library has a high-level API that you can readily use with your ML
+pipelines. See the
+[MLMD API documentation](https://www.tensorflow.org/tfx/ml_metadata/api_docs/python/mlmd)
+for more details.
+
+Check out
+[MLMD Declarative Nodes Filtering](https://github.com/google/ml-metadata/blob/v1.2.0/ml_metadata/proto/metadata_store.proto#L708-L786)
+to learn how to use MLMD declarative nodes filtering capabilities on properties
+and 1-hop neighborhood nodes.
+
+Also check out the
+[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) to
+learn how to use MLMD to trace the lineage of your pipeline components.
diff --git a/mkdocs.yml b/mkdocs.yml
index 2cf697934..6daf47ca6 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -107,15 +107,15 @@ extra_javascript:
watch:
- ml_metadata
nav:
- - Getting Started: get_started.md
+ - Guide: index.md
- Tutorial: https://tensorflow.github.io/tfx/tutorials/mlmd/mlmd_tutorial/
- API:
- mlmd:
- - Overview: api/mlmd/root.md
+ - Overview: api/mlmd/
- mlmd: api/mlmd/mlmd.md
- mlmd.errors:
- - Overview: api/mlmd.errors/root.md
+ - Overview: api/mlmd.errors/
- mlmd.errors: api/mlmd.errors/mlmd.errors.md
- mlmd.proto:
- - Overview: api/mlmd.proto/root.md
+ - Overview: api/mlmd.proto/
- mlmd.proto: api/mlmd.proto/mlmd.proto.md
From f5de8a54b4dc9765e7e62fdfaefbf7bbeff0e9c3 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 16:03:40 -0700
Subject: [PATCH 21/31] Change links to point internally
---
g3doc/index.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/g3doc/index.md b/g3doc/index.md
index f9908a2f0..37f11e6f1 100644
--- a/g3doc/index.md
+++ b/g3doc/index.md
@@ -190,7 +190,7 @@ following list provides a non-exhaustive overview of some of the major benefits.
within a range; find previous executions in a context with the same inputs.
See the
-[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) for
+[MLMD tutorial](https://tensorflow.github.io/tfx/tutorials/mlmd/mlmd_tutorial/) for
an example that shows you how to use the MLMD API and the metadata store to
retrieve lineage information.
@@ -555,7 +555,7 @@ ml-metadata (MLMD) | schema_version
The MLMD library has a high-level API that you can readily use with your ML
pipelines. See the
-[MLMD API documentation](https://www.tensorflow.org/tfx/ml_metadata/api_docs/python/mlmd)
+[MLMD API documentation](../api/mlmd)
for more details.
Check out
@@ -564,5 +564,5 @@ to learn how to use MLMD declarative nodes filtering capabilities on properties
and 1-hop neighborhood nodes.
Also check out the
-[MLMD tutorial](https://www.tensorflow.org/tfx/tutorials/mlmd/mlmd_tutorial) to
+[MLMD tutorial](https://tensorflow.github.io/tfx/tutorials/mlmd/mlmd_tutorial/) to
learn how to use MLMD to trace the lineage of your pipeline components.
From 61b75e9324cf9a5d742141ad54a235358f213f69 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 16:25:57 -0700
Subject: [PATCH 22/31] Remove logo
---
mkdocs.yml | 1 -
1 file changed, 1 deletion(-)
diff --git a/mkdocs.yml b/mkdocs.yml
index 6daf47ca6..399112061 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -32,7 +32,6 @@ theme:
toggle:
icon: material/brightness-4
name: Switch to system preference
- logo: images/tf_full_color_primary_icon.svg
favicon: images/favicon.png
features:
From 946ef6098407ba5ca3a4b501bcc47a479cd5b915 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 16:32:52 -0700
Subject: [PATCH 23/31] Fix broken link
---
g3doc/index.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/g3doc/index.md b/g3doc/index.md
index 37f11e6f1..5cdf9b9ae 100644
--- a/g3doc/index.md
+++ b/g3doc/index.md
@@ -555,7 +555,7 @@ ml-metadata (MLMD) | schema_version
The MLMD library has a high-level API that you can readily use with your ML
pipelines. See the
-[MLMD API documentation](../api/mlmd)
+[MLMD API documentation](api/mlmd.md)
for more details.
Check out
From 50ed6c1af802f335dc46bee0d1aa989dbbbdeac9 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 17:22:07 -0700
Subject: [PATCH 24/31] Fix admonitions
---
g3doc/index.md | 20 +++++++++++---------
1 file changed, 11 insertions(+), 9 deletions(-)
diff --git a/g3doc/index.md b/g3doc/index.md
index 5cdf9b9ae..3b2904b37 100644
--- a/g3doc/index.md
+++ b/g3doc/index.md
@@ -484,10 +484,11 @@ the steps together within a single transaction, and the transaction is
rolled-back when an error occurs. The migration script is provided together with
any schema-change commit and verified through testing.
-Note: The migration DDLs in MySQL are not transactional. When using MySQL, there
-should only be a single connection with the upgrade migration enabled to use the
-old database. Take a backup of the database before upgrading to prevent
-potential data losses.
+!!! Note
+ The migration DDLs in MySQL are not transactional. When using MySQL, there
+ should only be a single connection with the upgrade migration enabled to use the
+ old database. Take a backup of the database before upgrading to prevent
+ potential data losses.
### Downgrade the database schema
@@ -509,11 +510,12 @@ metadata_store.downgrade_schema(connection_config,
downgrade_to_schema_version = 0)
```
-Note: When downgrading, MLMD prevents data loss as much as possible. However,
-newer schema versions might be inherently more expressive and hence a downgrade
-can introduce data loss. When using backends that do not support DDL
-transactions (e.g., MySQL), the database should be backed up before downgrading
-and the downgrade script should be the only MLMD connection to the database.
+!!! Note
+ When downgrading, MLMD prevents data loss as much as possible. However,
+ newer schema versions might be inherently more expressive and hence a downgrade
+ can introduce data loss. When using backends that do not support DDL
+ transactions (e.g., MySQL), the database should be backed up before downgrading
+ and the downgrade script should be the only MLMD connection to the database.
The list of `schema_version` used in MLMD releases are:
From 384405721cf741d35acfa5aeac135fcf8024558d Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Mon, 23 Sep 2024 17:27:07 -0700
Subject: [PATCH 25/31] Add white background to png files
---
.github/workflows/cd-docs.yml | 2 +-
g3doc/stylesheets/extra.css | 4 ++++
requirements-docs.txt | 8 ++++++++
setup.py | 16 +++++++---------
4 files changed, 20 insertions(+), 10 deletions(-)
create mode 100644 requirements-docs.txt
diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml
index 9a211fb34..36a0cb949 100644
--- a/.github/workflows/cd-docs.yml
+++ b/.github/workflows/cd-docs.yml
@@ -39,7 +39,7 @@ jobs:
mkdocs-material-
- name: Install Dependencies
- run: pip install mkdocs mkdocs-material mkdocstrings[python] griffe-inherited-docstrings mkdocs-autorefs black mkdocs-jupyter mkdocs-caption
+ run: pip install -r requirements-docs.txt
- name: Deploy to GitHub Pages
run: mkdocs gh-deploy --force
diff --git a/g3doc/stylesheets/extra.css b/g3doc/stylesheets/extra.css
index e734efefd..beaf9694e 100644
--- a/g3doc/stylesheets/extra.css
+++ b/g3doc/stylesheets/extra.css
@@ -13,3 +13,7 @@
width: 100%;
aspect-ratio: 16 / 9;
}
+
+p img{
+ background: white;
+}
diff --git a/requirements-docs.txt b/requirements-docs.txt
new file mode 100644
index 000000000..2c76c4f30
--- /dev/null
+++ b/requirements-docs.txt
@@ -0,0 +1,8 @@
+mkdocs
+mkdocs-material
+mkdocstrings[python]
+griffe-inherited-docstrings
+mkdocs-autorefs
+black
+mkdocs-jupyter
+mkdocs-caption
diff --git a/setup.py b/setup.py
index 55a642216..0bb2f9f0d 100644
--- a/setup.py
+++ b/setup.py
@@ -127,6 +127,11 @@ def run(self):
with open('README.md') as fp:
_LONG_DESCRIPTION = fp.read()
+# Get documentation build requirements
+with open("requirements-docs.txt", "r") as fp:
+ docs_reqs = fp.readlines()
+docs_reqs = [req.replace("\n", "") for req in docs_reqs]
+
setup(
name='ml-metadata',
version=__version__,
@@ -167,15 +172,8 @@ def run(self):
extras_require={
'lint': ['pre-commit'],
# TODO: Pin versions for docs
- "docs": ["mkdocs",
- "mkdocs-material",
- "mkdocstrings[python]",
- "griffe-inherited-docstrings",
- "mkdocs-autorefs",
- "black",
- "mkdocs-caption"
- ]
- }
+ "docs": docs_reqs
+ },
python_requires='>=3.9,<4',
packages=find_packages(),
include_package_data=True,
From a3adca30f5027f9a2faa2c31e8d5c756b0baea83 Mon Sep 17 00:00:00 2001
From: smokestacklightnin
<125844868+smokestacklightnin@users.noreply.github.com>
Date: Sat, 28 Sep 2024 23:34:04 -0700
Subject: [PATCH 26/31] Add build docs check on pull request
---
.github/workflows/cd-docs.yml | 25 +-
.gitignore | 4 +
g3doc/api/mlmd.errors/index.md | 45 --
g3doc/api/mlmd.errors/mlmd.errors.md | 5 -
g3doc/api/mlmd.proto/index.md | 35 --
g3doc/api/mlmd.proto/mlmd.proto.md | 5 -
g3doc/api/mlmd/index.md | 23 -
g3doc/api/mlmd/mlmd.md | 5 -
g3doc/images/favicon.png | Bin 404 -> 0 bytes
g3doc/images/mlmd_flow.png | Bin 50049 -> 0 bytes
g3doc/images/mlmd_overview.png | Bin 55112 -> 0 bytes
g3doc/images/tf_full_color_primary_icon.svg | 1 -
g3doc/index.md | 570 --------------------
g3doc/javascripts/mathjax.js | 19 -
g3doc/stylesheets/extra.css | 19 -
mkdocs.yml | 8 +-
16 files changed, 21 insertions(+), 743 deletions(-)
delete mode 100644 g3doc/api/mlmd.errors/index.md
delete mode 100644 g3doc/api/mlmd.errors/mlmd.errors.md
delete mode 100644 g3doc/api/mlmd.proto/index.md
delete mode 100644 g3doc/api/mlmd.proto/mlmd.proto.md
delete mode 100644 g3doc/api/mlmd/index.md
delete mode 100644 g3doc/api/mlmd/mlmd.md
delete mode 100644 g3doc/images/favicon.png
delete mode 100644 g3doc/images/mlmd_flow.png
delete mode 100644 g3doc/images/mlmd_overview.png
delete mode 100644 g3doc/images/tf_full_color_primary_icon.svg
delete mode 100644 g3doc/index.md
delete mode 100644 g3doc/javascripts/mathjax.js
delete mode 100644 g3doc/stylesheets/extra.css
diff --git a/.github/workflows/cd-docs.yml b/.github/workflows/cd-docs.yml
index 36a0cb949..eaeb2a2bc 100644
--- a/.github/workflows/cd-docs.yml
+++ b/.github/workflows/cd-docs.yml
@@ -2,9 +2,9 @@ name: deploy-docs
on:
workflow_dispatch:
push:
- # Uncomment these lines before merge
- # branches:
- # - master
+ branches:
+ - master
+ pull_request:
permissions:
contents: write
jobs:
@@ -14,15 +14,10 @@ jobs:
- name: Checkout repo
uses: actions/checkout@v4
- - name: Configure Git Credentials
- run: |
- git config user.name github-actions[bot]
- git config user.email 41898282+github-actions[bot]@users.noreply.github.com
-
- - name: Set up Python 3.10
+ - name: Set up Python 3.12
uses: actions/setup-python@v5
with:
- python-version: '3.10'
+ python-version: '3.12'
cache: 'pip'
cache-dependency-path: |
setup.py
@@ -42,4 +37,12 @@ jobs:
run: pip install -r requirements-docs.txt
- name: Deploy to GitHub Pages
- run: mkdocs gh-deploy --force
+ if: (github.event_name != 'pull_request')
+ run: |
+ git config user.name github-actions[bot]
+ git config user.email 41898282+github-actions[bot]@users.noreply.github.com
+ mkdocs gh-deploy --force
+
+ - name: Build docs to check for errors
+ run: mkdocs build --verbose
+ if: (github.event_name == 'pull_request')
diff --git a/.gitignore b/.gitignore
index ed7d69318..63e111c79 100644
--- a/.gitignore
+++ b/.gitignore
@@ -174,3 +174,7 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
+
+# Bazel build files
+bazel-*
+
diff --git a/g3doc/api/mlmd.errors/index.md b/g3doc/api/mlmd.errors/index.md
deleted file mode 100644
index 04562c7fa..000000000
--- a/g3doc/api/mlmd.errors/index.md
+++ /dev/null
@@ -1,45 +0,0 @@
-# mlmd.errors
-
-Exception types for MLMD errors.
-
-## Classes
-
-[`class AbortedError`][ml_metadata.errors.AbortedError]: The operation was aborted, typically due to a concurrent action.
-
-[`class AlreadyExistsError`][ml_metadata.errors.AlreadyExistsError]: Raised when an entity that we attempted to create already exists.
-
-[`class CancelledError`][ml_metadata.errors.CancelledError]: Raised when an operation or step is cancelled.
-
-[`class DataLossError`][ml_metadata.errors.DataLossError]: Raised when unrecoverable data loss or corruption is encountered.
-
-[`class DeadlineExceededError`][ml_metadata.errors.DeadlineExceededError]: Raised when a deadline expires before an operation could complete.
-
-[`class FailedPreconditionError`][ml_metadata.errors.FailedPreconditionError]: Raised when the system is not in a state to execute an operation.
-
-[`class InternalError`][ml_metadata.errors.InternalError]: Raised when the system experiences an internal error.
-
-[`class InvalidArgumentError`][ml_metadata.errors.InvalidArgumentError]: Raised when an operation receives an invalid argument.
-
-[`class NotFoundError`][ml_metadata.errors.NotFoundError]: Raised when a requested entity was not found.
-
-[`class OutOfRangeError`][ml_metadata.errors.OutOfRangeError]: Raised when an operation iterates past the valid input range.
-
-[`class PermissionDeniedError`][ml_metadata.errors.PermissionDeniedError]: Raised when the caller does not have permission to run an operation.
-
-[`class ResourceExhaustedError`][ml_metadata.errors.ResourceExhaustedError]: Some resource has been exhausted.
-
-[`class StatusError`][ml_metadata.errors.StatusError]: A general error class that cast maps Status to typed errors.
-
-[`class UnauthenticatedError`][ml_metadata.errors.UnauthenticatedError]: The request does not have valid authentication credentials.
-
-[`class UnavailableError`][ml_metadata.errors.UnavailableError]: Raised when the runtime is currently unavailable.
-
-[`class UnimplementedError`][ml_metadata.errors.UnimplementedError]: Raised when an operation has not been implemented.
-
-[`class UnknownError`][ml_metadata.errors.UnknownError]: Raised when an operation failed reason is unknown.
-
-## Functions
-
-[`exception_type_from_error_code(...)`][ml_metadata.errors.exception_type_from_error_code]: Returns error class w.r.t. the error_code.
-
-[`make_exception(...)`][ml_metadata.errors.make_exception]: Makes an exception with the MLMD error code.
diff --git a/g3doc/api/mlmd.errors/mlmd.errors.md b/g3doc/api/mlmd.errors/mlmd.errors.md
deleted file mode 100644
index 3c1dada6c..000000000
--- a/g3doc/api/mlmd.errors/mlmd.errors.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# mlmd.errors
-
-::: ml_metadata.errors
- options:
- show_if_no_docstring: true
diff --git a/g3doc/api/mlmd.proto/index.md b/g3doc/api/mlmd.proto/index.md
deleted file mode 100644
index a7cf83f20..000000000
--- a/g3doc/api/mlmd.proto/index.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# mlmd.proto
-
-ML Metadata proto module.
-
-## Classes
-
-[`class Artifact`][ml_metadata.proto.Artifact]: An artifact represents an input or an output of individual steps in a ML workflow, e.g., a trained model, an input dataset, and evaluation metrics.
-
-[`class ArtifactType`][ml_metadata.proto.ArtifactType]: A user defined type about a collection of artifacts and their properties that are stored in the metadata store.
-
-[`class Association`][ml_metadata.proto.Association]: An association represents the relationship between executions and contexts.
-
-[`class Attribution`][ml_metadata.proto.Attribution]: An attribution represents the relationship between artifacts and contexts.
-
-[`class ConnectionConfig`][ml_metadata.proto.ConnectionConfig]: A connection configuration specifying the persistent backend to be used with MLMD.
-
-[`class Context`][ml_metadata.proto.Context]: A context defines a group of artifacts and/or executions.
-
-[`class ContextType`][ml_metadata.proto.ContextType]: A user defined type about a collection of contexts and their properties that are stored in the metadata store.
-
-[`class Event`][ml_metadata.proto.Event]: An event records the relationship between artifacts and executions.
-
-[`class Execution`][ml_metadata.proto.Execution]: An execution describes a component run or a step in an ML workflow along with its runtime parameters, e.g., a Trainer run, a data transformation step.
-
-[`class ExecutionType`][ml_metadata.proto.ExecutionType]: A user defined type about a collection of executions and their properties that are stored in the metadata store.
-
-[`class FakeDatabaseConfig`][ml_metadata.proto.FakeDatabaseConfig]: An in-memory database configuration for testing purpose.
-
-[`class MetadataStoreClientConfig`][ml_metadata.proto.MetadataStoreClientConfig]: A connection configuration to use a MLMD server as the persistent backend.
-
-[`class MySQLDatabaseConfig`][ml_metadata.proto.MySQLDatabaseConfig]: A connection configuration to use a MySQL db instance as a MLMD backend.
-
-[`class ParentContext`][ml_metadata.proto.ParentContext]: A parental context represents the relationship between contexts.
-
-[`class SqliteMetadataSourceConfig`][ml_metadata.proto.SqliteMetadataSourceConfig]: A connection configuration to use a Sqlite db file as a MLMD backend.
diff --git a/g3doc/api/mlmd.proto/mlmd.proto.md b/g3doc/api/mlmd.proto/mlmd.proto.md
deleted file mode 100644
index e190a876c..000000000
--- a/g3doc/api/mlmd.proto/mlmd.proto.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# mlmd.proto
-
-::: ml_metadata.proto
- options:
- show_if_no_docstring: true
diff --git a/g3doc/api/mlmd/index.md b/g3doc/api/mlmd/index.md
deleted file mode 100644
index 4a1890b72..000000000
--- a/g3doc/api/mlmd/index.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# mlmd
-
-Init module for ML Metadata.
-
-## Modules
-
-[`errors`][ml_metadata.errors] module: Exception types for MLMD errors.
-
-[`proto`][ml_metadata.proto] module: ML Metadata proto module.
-
-## Classes
-
-[`class ListOptions`][ml_metadata.ListOptions]: Defines the available options when listing nodes.
-
-[`class MetadataStore`][ml_metadata.MetadataStore]: A store for the metadata.
-
-[`class OrderByField`][ml_metadata.OrderByField]: Defines the available fields to order results in ListOperations.
-
-## Functions
-
-[`downgrade_schema(...)`][ml_metadata.downgrade_schema]: Downgrades the db specified in the connection config to a schema version.
-
-
diff --git a/g3doc/api/mlmd/mlmd.md b/g3doc/api/mlmd/mlmd.md
deleted file mode 100644
index 7ac8b930f..000000000
--- a/g3doc/api/mlmd/mlmd.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# mlmd
-
-::: ml_metadata
- options:
- show_submodules: false
diff --git a/g3doc/images/favicon.png b/g3doc/images/favicon.png
deleted file mode 100644
index 00a8af6dabc6bd2705678240ab4a59398f490e80..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 404
zcmV;F0c-w=P)bWUp}xZee%_FF+E28Gr&LzQXejWB
zFF+E26#x_I#XCg22?r-%Yj0PYEp7nRKcP5}NPfXW>d
zAQS*X4L}sYk;G^K2?3k{u>_C=$Zl&HU|oxb9nw&M7hvPP797D5^YtB!^1A>lE_VEA
zWtT8;3G)qN)hY)JtNaxtw(!6H4J$lDHv|41;8=iuIsgd)i~wGMGznl`3oC$`4v;qk
yX22l>+=$W4)dl4JFr1FvuXH>&TgG!!vezfc^{4$!^Uogu0000>?2^c<1p0!RxuE
z`|7V_j;k6a^hm__csy=6pRYJl=J3ut=^MYJizG|xx3U&U9et#5W2g>Q`?S$i84u`V
zXiuUpvw%V4bscVK-Ehuz*290CxOR*}?2fAmSYz%ZxnYTEkxy4cgXm&iE@IsaxlqMbvzN3-L#jHWFs
z&grc`1Zv*KOof`%ZjQ2@dea31~F0>tl
z`zT<7!M+%0$KO}O>~p@g8v{T43WGuF>pxyw@&4mh-f@k<<$G$g)r#sr>lg*B2-dy>
zhYBjbJ^uMQCfM7Fq{FLmFnGx<1oSfbTxs-2D9@d(l?7k520QBWD`M>8JF86%frAU_Z7YL;?mwv1toBgQy?sSWTm3yfe6sk}
zZ(M)aEWI;}avBPpsXV!Htak@HO@t%Mg
zU?k*cg!mg7N4Q%}zdFesjNhWVM{4aXXz>nh
zepil+w+>z#Bl*mR>})h`620Q|SI{I@IBr~;O(~`?h7Rg)-EB~^jUU_?TzL>F9-Qct
z9=dT!ODt=p`4USl?j+SJ6f;*2X$ubO<$cgnfLd)37hAUsY7iMbd-R`X?0k3Mc38d-
zsa=)gj9+Oj!vDkre&;yVmvyOD62HAv*tz_9XSua_2Xx)~#^U-5eOVQ`yWO6QYBj`!
z&+l&My`LGV%J2%Fl*I-tO=f=W4;H$;7XQ&?%_7GOBj?Kef$TWe5@${{z}2-$gQZv<
ze1}lJK60yddelPL`j8_mZlGkNWkxi^!_>?sg!gvKk9DqVgDCfLXzlxr1!D2RTa)hk
z1m08I!F_o0m5>nJEGBB(MiM7A=6w(2NnaLxy&Hu3S8^
zjkmiYVp)esvbD?!yp)ma6OxObSQP^GL8t?>!7E+7e%k?)?oH(Sa{u+MZ(6L=?iL}X
z0f-C%7v;FL72g>xzA~Fx>PGuc`#4IHwDl%1w|@n`PRjB!8`yItovoT#lw2=?;9$$P
z-d$Aim_w|`_5r3pxVTp!ax3{q+P3gpxr%DHs}{0f^MF5J;tXxU69Gu~8-xy4ZOu?K
z5=;6Z(b`uOp~(p`Op?kcqViuEXt90KC@4-M>+W-CZNrtzJR>ljpHzq&)R}y5Df@a#
z@s$5c4U!XED^k^YXmeatpF$bRwAm29)waHIV=EN%yn!q-wA$!e6FyMJ8p_;1$^H!c
zgj~lE02e@t8_!B`F7|98HC-0G#n)P-3R8XRl=>j;jk+5?AWF>XW#9Cbtv|4(gA&Wi
zGJ=Xn6bsEuC0Azqw{;hN{YkKdxUJwG$LA;`XUnu$lqCC+R#fd|Q}SY|{xbAK*lu`5
zplu(^TP+=B#-;A98S`4{11`lK|Uq67cRkQi0aoTMwQ=KK6#%3@mgHfRb*>`4n8)^N1s@0%vBNBT(%T{xfr!b`mUvXuy;yR
zZ&m^>5NC&Q)UF|0B^}z>-*&_%1G!w8CF{T5);%{JSu~-f>HMzVVtwUy$NJGGysdN(
zLIcD!>LReCoR8RayuZCYzunOnzuiQKDxVxI5`Sn|-D}yy~%iap>pzt3KAub(D
za2%tLlb*#Fbt(-+5Uj1nA!k|9QCeuyP-5`n+yFm{U$c<In@`a7a@k$LR;S3
z&dPdI7e^z0x=0FsPDn##nk)0gX`3Q^yd%z0czH`2nbr!fgAO@xED8-&%fIaNjyTMY
z9_wprG5cxC!yyTs4@>Ih?sn8Fi-O5dN9I+#5u6jsKEF?9aHVwkhLySD>d31N7L~Q)
zJIZPn;01G1vYkUdrVWS8KRk1I3Hp5YMQ$fsoV-}66$7odAztLqM8@H-qXYZ=>XV~L
zT|NxTB5-^9(ye*tTJIQprK?Ee8)ov%(>~LSX8P)0c7`N*t?xR8GI=gV*#$-u(qT#}
z50UbV5j#yHgrHY$){EZJfCcr*){bGxNS!kA+@O`->W$Hhkao0^S*V*e#Whu;NGZUQ
zcyVfGlHPo!5IyBK6w-2ijmmXzv0Tdn+_RE|cs=qm1!Fos+LSqIvjF99-HM_pj+XUz
z_^LZpGSLb>`Gl5bJHt(8f%PtBOKdAn+>{Yc=)_xqO$6Q~kKUly>~Z9KCTLHZeoo+S
z#zmTUp=U1nNT)rSw6}e@K3|G6!sHHwVdla>j3{QU<=3Iioh|X^km=I=$!LeO?uu{L
zpQdhE{}AiNnm#~UF2vMboeg9*9fKHZ$1@|H>zu<*6e(#IIanW+iJc4)RCs1`c3NEL
zk0XQRpt7QD@#ke}QN}%vFX<^@;~h;3qGw%N}ov
z90szte4}wa>ocTv8-2%uUf=wvzy;{_o?4HzoT!>I@r=zC$-TIF))%8mok=8W+$@v=
zmWsDW;5GijBJ3u@p*?=$$+gh2DxWVWP7u;=)M~y95{b08j^p|
z-#+_RVe<>0ni->fTHaQ7#{1+jTf@Uz(59RIN+7KwrS*+V+Fk(}t1}LSL){?-UwAE8
zt}m}S%uPv9>d<
zNcl3?dkUO!5X1VyjBtzDy!#+nud6L8ck)RyzS+HjJdUh#O1&I(basPlx^!l
zI`cZ45eN8ds^bjPasDF|SH$)pHncHRFKNGab;i9*^a&X(wy(w`)R}d*D;+o*r^QS{
z$C^HolsMi`Utz1#^@q9~e7gpn2woZ)3&j-I7;l0~xso*_y4|`13IiK2SnW(Le8T3j
z5W45wm)EkA7-cr@AZAKsf$W}N>yFA;ouwP7npj+3-nv80>tv)lTJHzUM#+LL-$@Ty
zlXtAA4!dTn=tI
za5#X`{{xl(4Tt{|mH(S2{0C|HyFtIq1MJ;b^g#Z9G3fu5UN91X8hG|!<4in~?(<(P
z`Zaz3wCJB#|9a5>M1KH^)6-$a`kT1?zfahIH4iYTOz|%U_CFc){|g`czghYJZ6FK;
zSvo>6l^85jJs(&2fp2uGpvtXZbJe*ucVuXrIwbj!m2j4Y@Utuu1Ck)}xflnz+Im_x
z%UE$h48tUn&rbxz5LvYlDi})A5iuGYZ8Q-gBp*vnVsdWMAGGzfzQ0A*pSfC%8+{EvR{K|d?3EZCU<|4?mk{L5E8Vfu@EH9
zx%YSR#D_7$d>*qKyqSEwkv${j|5-52(MvsM(5~*Y&a)2qzvqehH7GkB!?S~);bE|R(LGaX7x)aQ?%o5@cTRHaoO~%f7#NQ@c&g10Nh
zGcB`TW8m^uYaS)(yTtVd9t(2+8Z#?zeSMtvS1elw3&Ry_U1w`60-s&e5eK~&pB2h^
zV=;G{3-83
zF~YuE)55NXB{@FPkx+h2qE+is!PXYW=5Xo5rf-@lPz|*8WGILs2*UlRPrQr7B-;^$
z(4vn+i;0HMidEf8PveIU3=vh|Iz0^i9#%+z)y+c?Cz<(14m?96}9L;Xt1O6
z-`(jqe|Q8$47-W)l;M=OG|d&UAM^+2^WG8t7jvEjv8oqsjd47V+4=bv
zR$Bp9X6@W=)7JSBrP)k_z7iXjyUm$(s18Uwq%qv2S#xd_?@}r0bMOUWE}#ma|Mw*3$eV<}f$fT>0u0
ze}MYwowrp+eNWASIxB3o(nw(1o}YB2^MBQb(R}&TZrYRRVcQna0lv;i2nCT3{ONt^
zMW$#5g6a!BeV=jHQmuj&H!)^3NyYu{0K=Bb%LvS!jydHhIs$?CGDF*A=X^c+8mnsF
z9kZ*p+s{2hLA;p+9}!(P_sjKgP%}3>=v{9EusUnz}z{Wm>tB*Mux(
zBC!1o`)zqc`a)_-zIh?=--{;I?N
z>8%X4NTlpYDFqo}WhsG{X`5h3a>!KT@{-v&JS|gjfS^<{uL|$TXjcFpW~@+u)$raQ
zix#&S;Y)7Q!fo;FLuw3C$^=>@;?+CxL);N-ITA?-JUrI(wmePrDyw?VQ!eI^Uj
zy<72ml(8dq;v-j+Q0Ps6*kt9=f}t0sZueUqZsX#qa}^(x$BhPTV^bp|vpf6-Ty?Kj
zVZ8?IPqPjP;tgd3&1OU2om9zy9qPa|8g7^H=4y
zi;s_#snY#ayzF+oE#n{tm;`hc>i7?ZC;v^fcBD
zAe&IoZ%H$=`2pr)=003im7=(skL&yJ!G=iQNJds3`*99*lngpov*6b_`9qfM%G1;f+
zd&JlKoB|Kvg*85H5e~VPl`0Y18Fqjy#c8|Vo$z2_1xs;jrK1D4CKR;ync1en^kp{Q
zOl{#;YTUaahKRteC8CyN6-}$PWZjGAJ#F~*`()-djyHXVH*Pw7KQ6v@m4#uxNprc_
zwX(#D;{&F4yBwXs-~{u2IriBT)??tG6i;!l)d*TH%@9?M|9WwQLl7BEh|AB)l4xx)
zybMXr{^Rw@@k9b4y*#FuOisoWMuPfoQ(X}m4AqmK?I5B(LF0#=OpSXG3;*9H
zM!rb$bd`gU5OWtt!6CYk?S+g^ocjL`J9ttG|gQ>Hc>LuU=)Y0ZCLT4?^z;T7kOGV$0
zA~u$(q%{)ikNG9Wa5|ILvb0E$TJ(T2*=&XTHwfWAdTxZq?{K1IuzZG4@e%bxwPXfA
zUK5T<%wUOkyvB{9f(tv@DQ?2a_803h*rlce{UrY`Zcjh{*pwGtx3d3p@e?BLst<%4YO6->8UyiF%rPD>sCo@r2*GvcgfDf_8g
zWurJoRk=Rv&2(DlHmT;14Wq)inuXXXw(rwTEPr!;-TLH`gZ!T?mCrN((9}C{FS2tXoni74bH3CGR-qPj&<-;-IydQRkv2B8hYa3UEwDd^oyjI(I
z6Vvi;6yskW+b#$$b@Si|mxyw|+2v*)b1ezPJj~IEQoa%af_2S@JHXtSSi_Y?%_sqI
ztAj;K)$t;NlV{B1fK>ymK|r|U2t#^G{Wryqu|cot>P-QEs7kV
zs<=*^Kg%1$a296D-0LDYaQuu8ln-N6mxENoqYtA==M_YIf|-66Iq;Ew)QDa|xC?34
zCk*ECxP)kD?L4oWk&4UQ=onJ|T`xiRy2^lr7{_2@iG%U>;NF;fO{;t)7rL+(Pd!}QQiPs?wW(4c6HguApq
z^@MF6vkDSKG}P7wTg?+*h%~*e#@h9>#nZ#}txuM?a`33#tGgyHOK9|@;^^pm=@I;1JcaUzrt;-+1EVXo-*cvJrrT>K_
zm4qp8i=p|XH
zL9tfz`(&;4plf;xPP|iYNnrW6ohh=O99Z5eSf1CBrg#}~767Y0O^ZJ?P_q6d>t_8{
z^oxWOV{LN&mQ2;R_iL)&X>^HZWH2$g*H%-UH*}16+uc`vJJViV6sq6vX!3%)bY9@Y
zmVjv}M*Y;>I9NPE#`s9K?8Tgxp(HOukz=PU)A+xq4yC3VBo}&>Q{0?`242{I-2^##
zPHirW7RxB%)<-c_oue{OHe~dUbQk2s)t(XyG_F4B5-VGX8~3?f_^Hx(db9k}5#~LB
zv8yd~uCnW5N9_sUd1JF$B0y7E@ZD#gXLBg2QIS2DBvH+{KNw%@miJ%Cmvi$vB6jlp
z*{djN_
z9(6o{#x&NI%QOhip|TiDMfH5#Y?;c}=FZ3MCMA|`
zTHQB_P|*e>dJ9Ui0`$I2;&%~5l(}@UkJhXod;8Qr{P{q^M!y*AG3sX}O}8+{+>fvl
z@M*M+hk22V{HMagvkv+7)LMlC+k^{FAin$FRm~=1Q3Yr>WrBMGBe;s?ij^o;
zSo+#zvFlSvyTTg{b}RGBC6%O#<6iSRd~v>
z)ii>%xv2NN8Si|Mo`gBiZQliC@MV9Vb@dP*aBq
zU_SbLxAOY^-#|Oa{iDK4Pe#&zSP(zXURAF(N+@20&Zv&8>#(QF_z>^HlH8)K^^_YJ
z*?Xgz(S0X`N-!#|F(2TVk7-`k_wJ^gv)7RhG^!||$Z{(Js
zT<|CT!3&;{_Hx6_@0*bca$ZFF6MX6CRGgNGZ`otbu`G^Rjo`*_2b}WJ
zX)o=(sBc#!KjCs-*IkgmyJ--2(b`@GB%=Ud60%%|o}2S!G;VIleExw6i1?B|K2v
z;%sw&3TCZ
zy-=|}nb$CuV9SGU|LQYX;Q=N2>}roi59Yihc6X*tIxIFyri5yRRd;2i5FwlB=
zeS2e;mEdqBH5XyKkW9Sg`imsea>?uBB0`&+MRR@D?#-tJ+75r*kye{KXu4R9Y=2EU6J&5f2ZIMjG`@BudW8M`Qfgk=J
z&v_f_A5UaCYa=hKx|nEW{+I#frrd98eiRZF%GQJJFa6s-1O0x4e$Cmx
znx6j49gKS}y4M6nzdsw1EAZETg6@TTVIXqChQOawbSV*=W%G|WWFsma|JGN3H$xEVb(Mm
z=##rx;_|Q0K_P3cw%;4X0L|xD);@aAfd|da!}B1lUeWJy1z37Miy1%%zcL6Ab?fhw
z6dm7ZhRimYd~o5#m+eYMyE_9kbqGi|v>*SX;BhqM;A^R9?-*A6bHWO|c_*UQl~2=b2`l`Ozj>xM+w
z-KG15DBmxLS<*&dSHRe9oms6d66^azg*$y*-HY4t+~PcP?m@R#TYSYTAzkL1wgtT6
zL&YM%jQ?eCCeVv!Jrr=>%6smkQ0Ug@ogm{GrJ>cG0ARD%#(je#=KwJ)P>-9l-JW_p
zE-lbDu<~`KW#;7dUXRU^g!aJ8ezUgyDvQBHJZtF!^RA)4r1^_ZjSa~Pc010)A`K^f
zXV~SocoQ^OtAoe5%^X^pjRNGZD{}U$SF?9-2GA!q7HlogVj~5rf4;aop>LBM_u4g8
zbR=4BmdDE`$Qf%zK@Y8dcGa?tuFp{L3HiC4vb(guk?y|awW8%oB3`q4=QX7i9skOp
z>${!dk)_HY)>1x%54MU6tHwiXW)bx+A59yRaD$YWkz9ZOV>v2_GUuq
zO2$S@xtAhbHF849gZ-B`(Ue^!K;~H3zTV7v1#Bdfx%Tk3mSKmNQS*}*3w3E%kY
z@a$ADxMx)n8Fud3O~SR24`-0hhfkx|+@i^*P5bUSlQK3Z0!y2(Zu%tucykyn@4yXC
zF)Z={g6=*Ri?ym95uFv3f7zCyrPU9pe_cW}&sBc{$9ZQuoLA2x=UVkzEU&I^!dK(U{6cXWZoa
z_6~*LtjYg)=t`sgM*f_Yautl#T2!?u9RAH?DhxA>YKY1!DY(xd?cY4x{|tteexk%*
zBzd)m=|oAqRGvdo_RMpCh?!d(Ex_6(!BaCcJl59MN$%Bija}W{3X9uanL5WG9H<|f
z8)?c^q`ALee4Pb?Yf$IjgESs&yxNiTHlD<@^-FUVL!T=U#zD32sZ)^gq4O-P*0xp-CFZffG2Yy&za_s2UPPJuPb;2uqy7`RQu
zIyV|_3;;UfFlsi?)`u&W>h-lQ%oXVJ0t#%7EE?2wA*8(ZJNil3ylnZvg9_LBvqy?L
z<3{M+?AmT&K7PfYX8E=x8&P^r7JfQ*%X0rfV%-N1)`Td%M#N>S!RyR|-W6UX(FCdy
zHchjb$K*xf|#)t>X+G^;#LcoO5*rZK@m(i1N>mO(E&X1C8A_)AD{Ay5fq~zvR
zM@X+?&r4I=&cQQ3KTUlSJ8tx*V0<|Xc#9GkJoe^
z!lT$$`DLvH3-IfccUxu3Pkj|+P=t+N%ea!6n|sO~1M)BrfyuswaQaCONsQTzK_Nl|Hn(UK`^>2Zw&co{M%dbZ2Wn$s2MUr(5f)M(
zo;gh#ZirO;IiIAwg;-u(9DPq#8(QngM1WX2zD_Mu+&f!9r=OUbnI&kaLJY^=@_0vl
z0Cs44SP807rF_A#SMow4Flz}ZBz>ywaVXR#m>$xd#Ug@P&EgGG^fv
zk}zw5?19aSyHa_d!bon8`;5(=-|64Gv`OK(%^4;itc>#
z++5_NPVp5N`V_-f+xBG!MTG5)Hpw&{8DcQ}xx{&I=;LxdU?n-ES2d8xOAJy3fod`;7R6VY%GXwzDD$%FMtx&Yas28ipw9QBhBQ6s$Q16Lk|`Cd_nrd
zkzsh)OG0H^y5Zf=2kr7@#=#Sb7;c_}=(ZY|LP#7WpHiP-IQsdp;>}jC67z|TO1BIp
zR#R&|7Wb4^2O(+mZ0fUCQxdXh`VJxvO8DM5&si+CU}e^ddxPM)E$<*xW8+V0Lw-#)
zTNzx%laNi+r-;r(40||G;pyQfyo7V4QlZ2_5X39q0eouBuV0Mks{H}l)BY(dL$ydoqc`X-G;g<3X
zp9Hd
zHQX-!{=G`xgW8ez&Phb8*ua=+gEqQ{B3Hmu1-51kULqry$*YYEBzdLYr_*!nG!z<&
zIn4>r*qU}!^t+Zo;)<3u9F|M$u23{AmP=Ig9KDJ);(PoB9ygXVYTQcO2UPlQ2s_;>
z85&z=93Zy)0FXMeO8YxgVQqF
z?9Ggb&7vyf-PW-?&&E0w#%t;krDs*H?2sE#qDDB&0vWHt$bLUeLxWXmB~Q>i?xB+8
z_={+vesJs#Tez^Y)F_SvF=9tr!c3K
zC6$$k0ja3w#QMGhYAA+-z?jos9zG3EfPuwHDFvtQjUii%i@I-?~x55{HDhzCt*+U@58&MHay
zFI8;^y|sM(b@DNB$Hf5|b;~sW02&$eW5itGDl(*ZxdrVH)^EzJsh+znR79$DvW;`c
zN{j7+^IseP`!1G>5ngg{G-&zzF0VfjF-ucEeKi@mP?YT!)e`Ug;y~>n#IUgMP`9|g
zwWLg7HVX7T|L~W8(!6t0c82CG7nOn`bPiF#U9fNveVQdRNe`G{E%de$T#(ieiMe
zWv+9ERqj*1z9_{AW$grr-i0i#MEW?K!DJ16!XW1tChy5_Z)aj#I)u1fLPW1jE!9Gd
zTM;#14Vyj(AT!IP(@J~-+)HmT{LOuU4dd5#`NC2uS?pTU$CdgO`^u~b6-nJo6Q#YI
zCr(&oIDDQqcV~ILTQqiQDNSE$ccJZ>?;7|QvEJpEf3hhWgWluPdur+)%l#+orB%k?
zaY5s9|{fxSNnNXY|gh