diff --git a/docs-guides/.buildinfo b/docs-guides/.buildinfo
index 9d2ff265a..38077bef5 100644
--- a/docs-guides/.buildinfo
+++ b/docs-guides/.buildinfo
@@ -1,4 +1,4 @@
 # Sphinx build info version 1
 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: a0cc5a057fb0f76f4409d50e75aca1b6
+config: 8b12fed34ce7966d0db3a29cbeb3e84c
 tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs-guides/_sources/source/mlmodel-utilities.md b/docs-guides/_sources/source/mlmodel-utilities.md
index 606688da6..15f77fa77 100644
--- a/docs-guides/_sources/source/mlmodel-utilities.md
+++ b/docs-guides/_sources/source/mlmodel-utilities.md
@@ -219,7 +219,7 @@ An example of how to update the output data types:

```python
from coremltools.models.model import MLModel
-from coremltools.utils import change_array_output_type
+from coremltools.utils import change_input_output_tensor_type
from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType

model = MLModel("my_model.mlpackage")
@@ -234,7 +234,7 @@ updated_model.save("my_updated_model.mlpackage")

Another example shows how to update the data types of all the function inputs:
```python
from coremltools.models.model import MLModel
-from coremltools.utils import change_array_output_type
+from coremltools.utils import change_input_output_tensor_type
from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType

model = MLModel("my_model.mlpackage")
@@ -257,3 +257,85 @@ Optional arguments:
Special values for the `input_names` and `output_names` arguments:
* an empty list means nothing will be modified (default for `input_names`)
* a list containing the `"*"` string means all relevant inputs/outputs will be modified (those that match the `from_type` type)

## Compute Plan

In certain situations, you may want to evaluate the computational needs of a Core ML model before deploying it.
The `MLComputePlan` class is designed for this purpose, allowing you to get insights into the resources and costs
associated with using the model.

Here’s what you can do with `MLComputePlan`:
- Model Structure: Examine the model structure.
- Compute Device Usage: Get insights into the compute devices that would be used for executing an ML Program operation or a NeuralNetwork layer.
- Estimated Cost: Get the estimated cost of executing an ML Program operation.

An example of how to use `MLComputePlan` to get the estimated cost and compute device usage for the operations in an ML Program:

```python
import coremltools as ct

# Path to the compiled ML Program model.
compiled_model_path = "my_model.mlmodelc"
# Load the compute plan of the model.
compute_plan = ct.models.compute_plan.MLComputePlan.load_from_path(
    path=compiled_model_path,
    compute_units=ct.ComputeUnits.ALL,
)
# Get the model structure.
program = compute_plan.model_structure.program
main_function = program.functions["main"]
for operation in main_function.block.operations:
    # Get the compute device usage for the operation.
    compute_device_usage = (
        compute_plan.get_compute_device_usage_for_mlprogram_operation(operation)
    )
    # Get the estimated cost of executing the operation.
    estimated_cost = compute_plan.get_estimated_cost_for_mlprogram_operation(operation)
```
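For instance, inside the loop above you could report where each operation is expected to run and how much it contributes to the model's overall cost. The following is a minimal sketch, assuming the device-usage object exposes `preferred_compute_device` and `supported_compute_devices` attributes, the cost object exposes a `weight` attribute (the operation's relative share of the total cost), and either result may be `None`:

```python
# A sketch of inspecting the per-operation results inside the loop above.
# The attribute names used here are assumptions, and either object may be None.
if compute_device_usage is not None:
    print("Preferred device:", compute_device_usage.preferred_compute_device)
    print("Supported devices:", compute_device_usage.supported_compute_devices)
if estimated_cost is not None:
    print(f"Estimated relative cost: {estimated_cost.weight:.4f}")
```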

## In-memory Model

If you are using an in-memory model in your application, you can easily test the workflow with `MLModelAsset`. The `MLModelAsset` class includes
the `MLModelAsset.from_memory` API, which enables you to load a model directly from the model's in-memory specification data. Once loaded, you
can use the model to make predictions.

An example of how to use `MLModelAsset` to load a `CompiledMLModel` from in-memory specification data:

```python
import numpy as np

import coremltools as ct
from coremltools.models.model import MLModel

# Load the model and serialize its specification to in-memory data.
model = MLModel("my_model.mlpackage")
model_spec = model.get_spec()
spec_data = model_spec.SerializeToString()
# Create an asset from the in-memory data and compile it.
asset = ct.models.model.MLModelAsset.from_memory(spec_data=spec_data)
compiled_model = ct.models.CompiledMLModel.from_asset(asset=asset)
result = compiled_model.predict(
    {
        "x": np.array([1.0]),
        "y": np.array([2.0]),
    }
)
```

Another example shows how to use `MLModelAsset` to load a `CompiledMLModel` from in-memory specification data where the specification has external blob file references:

```python
import numpy as np

import coremltools as ct
from coremltools.models.model import MLModel

# Load the model and read its weights file from the model package.
mlmodel = MLModel("my_model.mlpackage")
weight_file_path = mlmodel.weights_dir + "/weight.bin"
with open(weight_file_path, "rb") as file:
    weights_data = file.read()
# Serialize the specification to in-memory data.
model_spec = mlmodel.get_spec()
spec_data = model_spec.SerializeToString()
# Provide the weights data as `blob_mapping`.
asset = ct.models.model.MLModelAsset.from_memory(
    spec_data=spec_data, blob_mapping={"weights/weight.bin": weights_data}
)
compiled_model = ct.models.CompiledMLModel.from_asset(asset=asset)
result = compiled_model.predict(
    {
        "x": np.array([1.0]),
        "y": np.array([2.0]),
    }
)
```
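Because the asset is created from the same specification and weights as the original model, the compiled model should reproduce the original predictions. The following round-trip check is a minimal sketch, assuming the model takes the inputs `x` and `y` used above and that `mlmodel` and `compiled_model` from the previous example are still in scope:

```python
# Compare the original model's predictions with the compiled in-memory model's.
inputs = {"x": np.array([1.0]), "y": np.array([2.0])}
expected = mlmodel.predict(inputs)
actual = compiled_model.predict(inputs)
for name in expected:
    np.testing.assert_allclose(actual[name], expected[name], atol=1e-4)
```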

diff --git a/docs-guides/_static/documentation_options.js b/docs-guides/_static/documentation_options.js
index acc0bfca8..b9c8bddb4 100644
--- a/docs-guides/_static/documentation_options.js
+++ b/docs-guides/_static/documentation_options.js
@@ -1,5 +1,5 @@
 const DOCUMENTATION_OPTIONS = {
-    VERSION: '7.0',
+    VERSION: '8.1',
     LANGUAGE: 'en',
     COLLAPSE_INDEX: false,
     BUILDER: 'html',
diff --git a/docs-guides/genindex.html b/docs-guides/genindex.html
index 3e9571215..e4251ba3a 100644
--- a/docs-guides/genindex.html
+++ b/docs-guides/genindex.html
diff --git a/docs-guides/index.html b/docs-guides/index.html
index 254220275..248c5d7d8 100644
--- a/docs-guides/index.html
+++ b/docs-guides/index.html
@@ -532,7 +532,7 @@
 Core ML Tools
-© Copyright 2023, Apple Inc.
+© Copyright 2024, Apple Inc.
diff --git a/docs-guides/search.html b/docs-guides/search.html
index ac2426a8e..0c2a78db7 100644
--- a/docs-guides/search.html
+++ b/docs-guides/search.html
diff --git a/docs-guides/searchindex.js b/docs-guides/searchindex.js
index bf4b49169..09afe2fce 100644
--- a/docs-guides/searchindex.js
+++ b/docs-guides/searchindex.js
[41, 42, 57, 61], "compressed_model_a8": 57, "compressed_torch_model": 63, "compression_typ": 40, "compressor": [41, 52], "compressor_config": 61, "compris": 2, "comput": [2, 5, 7, 8, 19, 22, 33, 35, 41, 42, 43, 46, 47, 49, 51, 52, 54, 56, 57, 58, 59, 60, 61, 63, 65, 67, 71], "computation": 46, "compute_precis": [16, 20, 38, 71, 72], "compute_unit": [7, 8, 20, 28, 34, 38, 67], "computeunit": [8, 20, 28, 34, 38, 67], "conc_func": 70, "concat": 11, "concaten": 6, "concept": 31, "concis": 2, "conclud": 42, "concret": 68, "conda": [7, 72], "condit": [10, 13, 35, 36], "conduct": [23, 57], "confer": [34, 64], "config": [30, 34, 41, 42, 46, 47, 49, 51, 52, 54, 56, 57, 59, 60, 61, 63, 72], "config_dict": [41, 60], "configur": [6, 23, 25, 29, 30, 34, 40, 42, 48, 52, 57, 60, 63, 71], "confirm": [8, 75, 76], "connect": [2, 61, 64, 76], "consecut": 54, "consid": [20, 21, 26, 30, 31, 37, 41, 43, 60, 61], "consider": [20, 34, 42, 43, 46, 59], "consist": [2, 6, 8, 29, 31, 36, 72], "const": [2, 19, 30, 33, 69], "const_elimin": 22, "constant": [2, 22, 33, 52, 57], "constantsparsityschedul": [51, 52], "constexpr_blockwise_shift_scal": 40, "constexpr_lut_to_dens": 40, "constrain": [2, 58], "constraint": [21, 53, 57, 71], "construct": [3, 5, 6, 10, 18, 20, 30, 33, 35, 37, 40, 41, 47, 65, 67, 77], "consum": [1, 2, 71], "consumpt": [20, 24, 39, 43, 64], "contain": [6, 10, 16, 24, 28, 30, 32, 33, 37, 52, 53, 62, 76, 78], "content": [2, 6], "context": [3, 10, 15, 19], "contigu": 42, "continu": [2, 3, 10, 20, 25, 33, 68], "contrast": 71, "contribut": [0, 18, 22], "contributor": 23, "control": [2, 10, 13, 22, 24, 30, 34, 35, 36, 47, 57, 58, 71], "controlflownet": 35, "controlflownet2": 35, "conv": [21, 35, 40, 47], "conv1": [36, 47], "conv2d": [21, 35, 36, 52, 76], "conv3": 47, "conv_1": 40, "conv_2": 40, "conveni": [1, 13, 24, 30, 34, 63, 67, 70], "converg": 47, "convers": [1, 2, 6, 7, 8, 9, 12, 16, 19, 22, 24, 25, 26, 29, 30, 31, 33, 37, 39, 43, 47, 52, 64, 67, 69, 70, 71, 72, 73], "convert": [1, 2, 4, 18, 21, 22, 27, 29, 32, 34, 35, 36, 37, 41, 48, 49, 56, 61, 63, 64, 65, 66, 68, 71, 73, 78, 79], "convert_double_to_float_multiarray_typ": 30, "convert_keras_to_mlmodel": 76, "convert_neural_network_spec_weights_to_fp16": 39, "convert_neural_network_weights_to_fp16": 39, "convert_to": [16, 17, 21, 24, 26, 28, 38, 57, 68, 69, 70, 71], "converted_model": 67, "converted_model_kvcach": 67, "convnet": 43, "convolut": [8, 18, 35, 36, 52, 60, 65, 71, 76, 77], "copi": [5, 15, 23, 24, 34, 67], "copyfrom": 77, "copytre": 34, "core": [1, 3, 5, 6, 12, 14, 15, 16, 17, 18, 19, 22, 23, 24, 26, 27, 28, 30, 31, 32, 34, 35, 36, 37, 38, 40, 42, 43, 48, 49, 51, 53, 56, 58, 60, 61, 65, 66, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79], "coreltool": 51, "coreml": [5, 6, 7, 8, 13, 15, 20, 24, 26, 30, 33, 39, 41, 42, 46, 47, 49, 51, 52, 56, 57, 60, 62, 63, 72, 75, 78], "coreml_dict_nam": 8, "coreml_input": [7, 10], "coreml_model": [7, 24, 27, 57, 66, 79], "coreml_model_fil": 7, "coreml_model_path": 76, "coreml_out": [69, 70, 72], "coreml_out_dict": [8, 69, 70], "coreml_out_fp32": 72, "coreml_output": 7, "coreml_output_tensor": 72, "coreml_output_tensor_fp32": 72, "coreml_pred_dict": 7, "coreml_predicted_class_label": 7, "coreml_prob_dict": 8, "coreml_updatable_model_path": 76, "coreml_update_st": 67, "coremlcompil": 2, "coremltool": [0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 41, 42, 43, 46, 47, 49, 51, 52, 54, 56, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67, 69, 
70, 71, 72, 73, 75, 76, 77, 78, 79], "coremlweightmetadata": 30, "correct": [7, 20, 26, 30, 34, 35, 42, 47, 56, 63], "correctli": [5, 7, 18, 26, 35, 40, 69, 70, 75], "correl": 42, "correspond": [1, 3, 6, 20, 24, 30, 40, 48, 72], "cost": 34, "could": [24, 34, 37, 40, 41, 42, 43, 53], "count": 65, "counterpart": 57, "countri": 10, "coupl": 41, "cours": [2, 26, 52, 75], "cover": [40, 43], "coverag": [13, 31], "cow": [5, 78], "cp38": 25, "cpu": [5, 7, 8, 20, 28, 34, 38, 42, 43, 54, 59, 64, 71, 72], "cpu_and_gpu": [28, 67], "cpu_and_n": 28, "cpu_onli": [8, 20, 28, 34, 61], "cpuonli": [20, 71], "crash": 21, "creat": [5, 6, 7, 8, 11, 13, 15, 16, 18, 19, 20, 23, 24, 25, 26, 28, 29, 31, 34, 37, 47, 48, 53, 57, 72], "create_keras_base_model": 76, "creation": [1, 34, 64], "creativecommon": 78, "cross": 76, "crossattndownblock2d": 61, "crossentropyloss": 42, "crucial": 7, "cr\u00e8che": 78, "csv": [29, 30, 66], "ct": [1, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 37, 40, 41, 42, 47, 52, 57, 60, 61, 62, 63, 65, 66, 67, 69, 70, 71, 72, 78, 79], "ctc": 6, "cto": [30, 47, 57, 61, 62, 63], "cuda": 52, "current": [2, 13, 19, 23, 25, 31, 39, 43, 68, 75, 76], "curv": 63, "custom": [2, 4, 5, 30, 31, 47, 52, 57, 70, 73], "custom_lut": 65, "custom_mil_op": 19, "custom_tf_op": 19, "custom_topk": 19, "customdens": 70, "customtopk": 19, "cute": 15, "cvnet": [49, 59], "cvpixelbuff": 24, "d": [18, 48, 52, 61, 76], "dai": [11, 61], "daisi": [1, 8, 24, 26], "dat": 47, "data": [5, 8, 10, 13, 18, 23, 24, 26, 29, 30, 31, 32, 34, 35, 36, 40, 41, 43, 44, 46, 47, 49, 51, 53, 54, 58, 59, 61, 62, 64, 65, 66, 67, 70, 72, 76], "data_and_predict": 30, "data_load": 63, "data_util": 47, "dataload": [42, 47, 52, 57, 60], "dataset": [8, 10, 26, 42, 46, 49, 51, 52, 56, 59, 62, 76, 77], "datatyp": [76, 77], "de": [42, 57], "dead_code_elimin": 22, "deal": 30, "dearli": 11, "debug": [28, 30, 38], "decent": [61, 63], "decis": 67, "decod": [6, 8, 10, 11, 26, 42, 61], "decompos": 18, "decompress": [43, 49, 59, 62], "decor": [3, 19, 33], "decoupl": [2, 16], "decreas": [42, 43, 56], "dedupl": 37, "deep": [0, 2, 68, 73], "deeplab": [13, 31], "deeplabv3": 5, "deeplabv3_resnet101": 5, "deepspeech": [14, 18, 69], "def": [2, 3, 5, 7, 8, 10, 19, 20, 21, 24, 31, 33, 34, 35, 36, 37, 42, 61, 65, 67, 70, 72, 75, 76], "default": [6, 7, 8, 10, 17, 24, 26, 27, 28, 30, 37, 38, 40, 41, 47, 48, 52, 56, 57, 58, 65, 67, 69, 71, 72, 75, 77, 79], "default_class_label": [75, 77], "default_function_nam": 37, "default_palett": 40, "default_prun": 40, "default_s": 21, "default_token": 10, "defaultinput": 19, "defaultlabel": 75, "defaultvalu": 75, "defin": [1, 2, 3, 5, 7, 13, 15, 18, 20, 21, 26, 28, 29, 31, 33, 35, 36, 37, 42, 47, 57, 67, 69, 71, 78], "definit": [37, 69], "degrad": [42, 46, 63, 65], "del": 3, "delet": 34, "demand": 47, "demo_util": 6, "demonstr": [5, 7, 10, 15, 18, 26, 28, 29, 34, 35, 57, 65, 67, 69, 70, 72, 75, 76, 78], "denot": [10, 62], "dens": [6, 40, 42, 46, 52, 53, 54, 62, 70, 76], "dense1": 40, "dense_1": 76, "dense_1__activation__": 76, "dense_1__activation___output": 76, "dense_1_output": 76, "dense_2": [65, 76], "dense_2__activation__": 76, "dense_2_output": 76, "depend": [8, 10, 13, 25, 34, 35, 36, 42, 43, 46, 49, 51, 54, 59, 60, 63, 65, 71, 72], "deploi": [30, 37, 39, 42, 61, 67, 68, 72], "deploy": [16, 28, 32, 38, 48, 53, 61, 67], "deprec": [7, 20, 72], "depth": 78, "depthestim": 78, "depthwis": 65, "depthwiseconv": 65, "dequant": [58, 65], "deriv": 46, "desc": 37, "descent": [51, 76], 
"describ": [1, 2, 4, 5, 8, 15, 16, 20, 21, 23, 25, 26, 28, 33, 36, 39, 40, 41, 42, 47, 52, 56, 57, 65, 71], "descript": [2, 6, 8, 15, 18, 19, 20, 21, 22, 23, 26, 34, 61, 63, 68, 71, 72, 75, 76, 77], "deselect": 57, "design": 8, "desir": [31, 41, 51, 60, 63, 72, 75], "detach": [8, 11, 67], "detail": [0, 7, 8, 10, 13, 16, 19, 20, 21, 23, 24, 26, 28, 29, 32, 33, 38, 40, 43, 47, 52, 54, 57, 59, 61, 63, 64, 67, 69, 70], "detect": [22, 26, 40, 49], "detectinghumanbodyposesinanimag": 78, "determin": [11, 16, 21, 23, 24, 35, 44, 47, 71], "dev": [6, 15, 70], "develop": [13, 23, 28, 31, 34, 64], "deviat": [5, 8, 24, 46, 51], "devic": [2, 19, 20, 21, 24, 26, 34, 37, 42, 49, 52, 54, 59, 60, 63, 64], "di": 11, "diagram": 34, "dialect": 33, "dict": 78, "dictionari": [1, 5, 19, 30, 47, 52], "dictionarytyp": 8, "didn": 17, "differ": [6, 18, 20, 21, 24, 26, 30, 34, 35, 37, 38, 42, 43, 46, 47, 51, 52, 57, 58, 60, 61, 62, 63, 65, 69, 72], "differenti": [49, 51, 57, 62], "difficult": [7, 72], "diffus": [30, 34, 61], "digest": 67, "digit": 76, "digitprob": 76, "digitprobabilities_tru": 76, "dim": [10, 11, 31, 36, 40, 41], "dimens": [6, 8, 10, 11, 18, 20, 31, 34, 47, 58, 75, 77], "dimension": 77, "diningt": [5, 78], "dir_path": 7, "direct": 2, "directli": [2, 3, 6, 7, 12, 16, 20, 28, 29, 30, 35, 36, 37, 39, 40, 42, 43, 47, 56, 60, 61, 63, 67, 69, 70, 72], "directori": [2, 7, 20, 26, 34, 68, 70], "dirs_exist_ok": 34, "disabl": [3, 8, 13, 71], "disappear": 42, "discret": [40, 41, 43], "discuss": [23, 41], "disk": [2, 24, 30, 34, 41, 43, 47, 60, 65, 69], "displai": [1, 8, 21, 24, 26, 29], "display_segment": 5, "dispos": 3, "dist": 25, "distanc": 46, "distilbert_model": 15, "distilberttoken": 15, "distinguish": 2, "distribut": [1, 6, 25, 71], "divid": [8, 40, 53, 63], "dkm": [41, 46, 47, 60], "dkmpalett": [41, 46, 47, 60, 62], "dkmpalettizerconfig": [47, 60], "do": [2, 8, 10, 11, 20, 21, 22, 26, 28, 30, 33, 34, 37, 39, 40, 43, 47, 57, 60, 63, 67, 69, 71, 72], "do_quant": 65, "doc": [23, 54, 59], "doc_str": 19, "document": [3, 5, 21, 26, 64], "doe": [6, 25, 26, 28, 31, 32, 34, 37, 40, 42, 56, 65, 71, 72], "doesn": [21, 23, 42], "dog": [5, 15, 78], "domain": 78, "domin": 26, "don": [3, 6, 23, 28, 30, 33, 34, 35, 36, 67, 78], "done": [41, 47, 51, 62, 63, 67], "doubl": [5, 15, 18, 24, 26, 76, 78], "down": [19, 25, 42, 43, 54, 59, 61, 65], "down_block": 61, "down_block_res_sampl": 61, "download": [2, 5, 6, 25, 49, 51, 54, 59, 69, 70, 78], "download_file_and_unzip": 7, "drag": [5, 24, 26, 78], "dram": 43, "dramat": [42, 61], "draw": [18, 77], "drop": [49, 60, 63], "dropdown": 23, "dropout": [8, 13, 76], "dtype": [10, 11, 15, 19, 24, 31, 33, 34, 41, 42, 47, 57, 63, 67, 70], "due": [47, 52, 58, 61], "dummi": [11, 41], "dump": [5, 78], "duplic": 23, "dure": [1, 3, 7, 16, 18, 20, 21, 22, 24, 26, 34, 35, 37, 39, 40, 46, 49, 51, 57, 58, 59, 60, 64, 67, 68, 69, 71, 72], "dynam": [20, 31, 42, 57, 71], "dynamic_shap": [11, 31], "e": [11, 13, 31, 41, 43, 47, 54, 59, 62, 63], "e8bea591e72c": 75, "each": [2, 5, 6, 8, 18, 20, 22, 23, 24, 30, 33, 34, 37, 40, 41, 42, 46, 47, 48, 49, 51, 52, 53, 56, 58, 59, 60, 67, 71, 72, 75, 76, 77], "earlier": 13, "eas": 59, "easi": 19, "easier": [5, 60], "easiest": [36, 46], "easili": [2, 6, 20, 30, 33, 46, 63], "edg": 2, "edit": [16, 30], "educ": 64, "effect": [47, 57, 63], "efficaci": 51, "effici": [2, 12, 13, 18, 19, 22, 24, 32, 37, 42, 53, 56, 63, 67], "eight": 10, "einstein": 3, "einsum": 3, "either": [2, 8, 13, 16, 25, 26, 28, 32, 33, 38, 42, 47, 51, 52, 53, 62, 63, 68, 71], "element": [21, 
30, 42, 46, 47, 48, 51, 52, 54, 57, 65], "elicit": 10, "elig": 72, "elimin": [22, 38], "els": [7, 31, 35, 63, 72, 75], "elu": 3, "emb": [40, 77], "embed": [2, 8, 11, 18, 31, 42, 49, 51, 54, 59, 67], "embed_s": 67, "embedding_model": 77, "embedding_path": 77, "embedding_spec": 77, "emploi": [6, 28, 59], "empti": [18, 30, 75, 77], "enabl": [2, 20, 24, 32, 34, 35, 43, 47, 48, 57, 60, 65, 69], "enable_per_channel_scal": [42, 47], "encapsul": 29, "encod": [1, 15, 35, 61, 67], "encoder_hidden_st": 61, "encount": [3, 19, 35, 54], "encourag": 23, "end": [6, 7, 10, 34], "end_step": 67, "end_step_dim": 67, "endpoint": 19, "enforc": [53, 71], "eng": 75, "engin": [21, 28, 30, 34, 38, 41, 42, 43, 49, 54, 57, 58, 59, 60, 63, 64, 71, 72], "english": 7, "enhanc": [23, 68], "enough": [60, 71], "ensembl": 64, "ensur": [3, 7, 8, 13, 21, 23, 26, 40, 41, 57, 61, 70], "enter": [15, 23], "entir": [2, 6, 35, 48, 71], "entri": [26, 34, 40, 41, 48], "entropi": 76, "enumer": [24, 26, 28, 31, 47, 57, 60], "enumerated_shap": 21, "enumeratedshap": [20, 21, 31, 32], "env": [7, 25, 72], "environ": [7, 11, 31, 72], "eo": 10, "epoch": [47, 52, 60, 76], "epsilon": 33, "equal": [60, 61, 71], "equat": [2, 3, 24, 58], "equival": [24, 59], "error": [3, 5, 23, 33, 42, 48, 56, 57, 63, 72, 75, 76], "especi": [2, 28, 38, 42, 59], "essenti": 3, "establish": [2, 10, 11], "estim": [42, 56, 57, 63, 77, 78], "etc": [13, 26, 29, 40, 41, 42, 44, 47, 51, 61, 67], "eval": [5, 8, 10, 11, 13, 21, 28, 31, 34, 37, 41, 47, 57, 60, 67], "evalu": [6, 7, 18, 19, 24, 26, 34, 42, 46, 51], "evaluate_classifi": 30, "evaluate_regressor": 30, "evaluate_transform": 30, "even": [23, 31, 34, 36, 41, 42, 43, 59, 60, 63, 71, 72], "ever": 71, "everi": [11, 47, 52, 54], "evolut": 16, "evolv": [2, 3], "exact": [40, 57], "exactli": [30, 33, 35, 51, 72], "examin": [23, 24], "exampl": [0, 5, 6, 7, 8, 9, 10, 11, 13, 15, 16, 17, 19, 20, 21, 22, 23, 25, 31, 32, 33, 35, 36, 40, 42, 43, 48, 52, 53, 63, 65, 70, 71, 73, 74, 75, 76, 77], "example_imag": [1, 24, 26], "example_input": [8, 13, 20, 21, 24, 28, 31, 34, 41, 47, 57, 60, 63], "example_input_id": 11, "except": [2, 6, 21, 30, 33, 47], "exclud": 24, "execut": [2, 7, 8, 9, 16, 20, 22, 24, 26, 28, 31, 34, 43, 61, 65], "exercis": 60, "exist": [2, 7, 20, 21, 23, 37, 76], "expand": 71, "expand_dim": [7, 8, 72], "expect": [1, 3, 6, 8, 24, 26, 41, 59, 70, 72, 76, 77], "expected_output": 30, "expens": [34, 46], "experi": [35, 36, 43, 46, 51, 52, 60, 61, 63, 65, 71], "experiment": [10, 35, 51, 56, 57, 62, 63], "explain": [10, 35, 41], "explan": 57, "explicit": [2, 12, 67, 71], "explicitli": [40, 67, 71], "explor": [6, 42, 51, 56, 60, 61], "export": [5, 6, 12, 13, 18, 20, 28, 39, 41, 42], "export_dir": 6, "exported_model": 31, "exported_program": [5, 8, 11, 13], "exportedprogram": [8, 31, 68], "express": [2, 3, 21, 31, 40, 58], "extend": [11, 51, 65, 77], "extens": [5, 8, 16, 28, 34, 57, 60], "extra": [19, 38, 42, 57, 61, 67], "extract": [5, 6], "extractal": 7, "extractor": [37, 77], "f": [7, 31, 35, 36, 47, 67, 69, 72], "face": 67, "facebook": 42, "fact": 3, "factor": [34, 42, 44, 46, 51, 54, 58, 63], "fair": 67, "faithfulli": 72, "fake": 57, "fakequant": 57, "fals": [3, 11, 15, 19, 33, 42, 67, 69, 76], "famili": [11, 64], "familiar": 25, "faq": 0, "far": 6, "farmer": 11, "fast": [21, 28, 38, 42, 72], "fast_neural_style_wav": 72, "faster": [7, 34, 41, 43, 59, 62, 63, 67], "fastest": [56, 63], "fastpredict": 34, "father": 11, "fc": [52, 67], "featur": [0, 1, 2, 6, 12, 18, 20, 21, 23, 24, 26, 29, 32, 34, 37, 40, 47, 67, 72, 77, 
78], "feature_nam": 21, "featuretypes_pb2": [30, 76], "fed": [6, 10, 52, 67], "feed": [2, 30, 47, 61, 69, 76], "feed_dict": [7, 69, 72], "feet": 29, "few": [2, 6, 7, 33, 34, 40, 41, 42, 43, 44, 46, 51, 53, 56, 57, 63, 71, 76], "fewer": [42, 58], "field": [8, 15, 40], "field_nam": 40, "fig": 72, "figur": [2, 21, 24, 25, 37, 48, 67, 78], "file": [2, 5, 7, 8, 15, 16, 20, 23, 25, 26, 27, 28, 29, 30, 34, 47, 49, 51, 52, 54, 57, 59, 61, 64, 69, 70, 72, 76, 78, 79], "filename_tensor_nam": 69, "fill": [15, 23, 67], "filter": 23, "final": [2, 6, 22, 34, 41, 46, 47, 51, 52, 57, 60, 61, 63, 67, 69, 70, 76], "final_lay": 57, "finalized_model": 41, "find": [2, 7, 13, 23, 26, 28, 33, 38, 43, 47, 51, 52, 63, 65], "finder": [5, 15, 26, 78], "fine": [18, 30, 37, 42, 43, 49, 51, 52, 54, 58, 62, 64, 71, 76], "finer": 65, "finish": [5, 26], "finishmysent": 10, "finit": [20, 21], "first": [3, 5, 7, 8, 16, 21, 23, 24, 26, 28, 29, 31, 33, 34, 37, 39, 40, 41, 42, 57, 63, 64, 67, 69, 70, 72, 77], "first_lay": 57, "fisher": [42, 46], "fit": [29, 66], "five": [33, 35, 52], "fix": [6, 7, 8, 10, 21, 23, 34, 61, 69], "flag": [6, 7, 19, 47, 71], "flatten": [7, 70, 72, 76], "flatten_1_output": 76, "flexibl": [2, 4, 5, 6, 16, 69, 71, 73], "flexible_shape_util": 21, "float": [2, 18, 20, 32, 38, 39, 40, 41, 43, 48, 53, 58, 67, 71, 72, 77, 78], "float16": [16, 24, 30, 32, 41, 43, 46, 49, 51, 54, 56, 59, 60, 61, 62, 63, 67, 72], "float16computeprecis": 71, "float32": [6, 7, 8, 11, 16, 20, 24, 30, 32, 33, 34, 42, 49, 57, 67, 69, 70, 71, 72, 77], "float_v2_1": 69, "floatinputtyp": 19, "flow": [2, 8, 10, 13, 35, 36, 47, 57, 61, 63], "flush": 6, "fly": [43, 62], "fname": 7, "focu": 6, "focus": 67, "folder": [7, 8, 25, 34, 78], "follow": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 18, 19, 20, 21, 23, 24, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 47, 48, 52, 53, 56, 57, 58, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 76, 77, 78], "fontsiz": 72, "food": [26, 61], "footprint": [34, 43, 63, 64], "forev": 11, "forget": 40, "fork": [23, 25], "form": [1, 2, 5, 37, 43, 46, 47], "format": [0, 2, 7, 8, 9, 10, 12, 16, 18, 19, 20, 21, 23, 26, 27, 28, 34, 38, 39, 40, 41, 47, 48, 53, 54, 57, 62, 63, 64, 66, 69, 71, 75, 78, 79], "former": 56, "formula": 24, "forward": [5, 10, 21, 24, 31, 35, 36, 37, 41, 42, 47, 51, 57, 60, 61, 67], "found": [40, 57], "foundat": [2, 16], "four": [35, 63], "fourth": 65, "fp16": [19, 30, 40, 41, 42, 67], "fp16computeprecis": 72, "fp32": [19, 30, 32, 33], "fpath": 7, "fraction": [37, 53], "fragment": 35, "framework": [1, 2, 3, 23, 24, 25, 26, 30, 33, 34, 67, 68, 71, 73], "free": [10, 41, 43, 46, 51, 61, 62], "freez": [28, 69], "freeze_graph": 69, "frequenc": 6, "frequent": 20, "friendli": 47, "from": [0, 1, 2, 3, 5, 6, 7, 10, 11, 13, 15, 16, 18, 19, 20, 23, 24, 26, 29, 30, 32, 33, 35, 36, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 57, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 72, 73, 75, 76, 77, 78], "from_dict": [41, 42, 47, 52, 57, 60, 63], "from_numpi": [8, 40], "from_pretrain": [3, 10, 11, 15, 42], "from_typ": 30, "from_yaml": [47, 52, 57], "fromarrai": [5, 34], "frontend": [2, 3, 19, 26], "frontend_tensorflow2": 26, "frozen": [7, 18, 20, 28, 68], "frozen_graph": 28, "frozen_graph_fil": 69, "fruit": 61, "full": [18, 29, 30, 34, 39, 43, 47, 51, 57, 63, 65, 78], "fulli": [2, 20, 28, 36, 39, 40, 43, 52, 59, 61, 76], "function": [2, 5, 6, 19, 20, 23, 24, 30, 31, 33, 35, 36, 37, 41, 42, 43, 46, 47, 63, 67, 68, 76], "function_nam": [30, 37], "further": [2, 26, 42, 43, 48, 
52, 53, 56, 58, 59, 61, 71], "fuse": [22, 43], "fuse_elementwise_to_batchnorm": 22, "futur": [2, 16, 62], "futurewarn": 5, "fx": 57, "g": [7, 13, 31, 41, 43, 47, 54, 59, 62, 63, 72], "gain": [42, 43, 59, 60, 63], "galleri": 26, "gb": [30, 42, 61], "gelu": 70, "gelu_tanh_activ": 70, "gener": [2, 8, 10, 13, 24, 26, 28, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 43, 46, 51, 52, 54, 56, 61, 64, 65, 67, 68, 69, 70, 72], "generated_tensor": 10, "generated_text": 10, "generated_text_torch": 10, "german": 61, "get": [0, 1, 2, 5, 6, 7, 13, 16, 18, 19, 20, 24, 25, 28, 29, 33, 34, 37, 41, 42, 43, 47, 51, 57, 60, 61, 62, 63, 67, 69, 71, 76, 78], "get_allowed_shape_rang": 39, "get_c4": 42, "get_calibration_data": 47, "get_compiled_model_path": 34, "get_concrete_funct": 70, "get_custom_layer_nam": 39, "get_dataload": 47, "get_oper": [7, 69, 72], "get_peft_model": 37, "get_shap": [7, 69], "get_spec": [8, 16, 20, 29, 30, 34, 72, 77], "get_torch_model": 47, "get_weights_metadata": [30, 47, 52], "gfile": 69, "github": [13, 20, 23, 25, 26, 31, 77, 78], "give": [42, 44, 49, 59, 60, 63, 71], "given": [5, 10, 19, 26, 40, 43, 46, 51, 63, 67, 76, 78], "glass": 61, "glm": 2, "global": [24, 47], "global_config": [30, 41, 42, 47, 52, 57, 60, 61, 63], "global_variables_initi": 69, "go": [6, 25, 37, 40, 42, 43, 51, 60, 67, 75], "goal": [43, 60, 63], "goe": [54, 63], "golden": 7, "golden_retriever_carlo": 7, "good": [11, 23, 26, 32, 42, 43, 46, 49, 60, 61, 63, 72], "googl": 70, "googleapi": [7, 8, 26], "googlecreativelab": 77, "got": 40, "gpt": 61, "gpt2": 10, "gpt2lmheadmodel": 10, "gpt2token": 10, "gptq": [57, 62], "gpu": [2, 20, 28, 34, 38, 42, 43, 59, 64, 67, 71, 72], "grab": 3, "gradient": [42, 43, 46, 47, 51, 57, 63, 76], "gradual": [13, 31, 52], "grain": [30, 52, 58], "granular": [2, 40, 41, 42, 46, 47, 52, 56, 57, 60, 61, 63], "graph": [2, 4, 6, 18, 20, 26, 28, 31, 32, 33, 35, 36, 40, 57, 68, 71, 72, 73], "graph_def": 69, "graph_def_fil": 69, "graph_pass": 22, "graphdef": [7, 69, 72], "graphic": [2, 28, 34, 38], "graphmodul": 57, "grayscal": [20, 38, 76, 77], "grayscale_float16": 24, "great": [59, 64], "greater": [19, 30, 52], "greedili": 11, "green": 24, "green_bia": 24, "group": [40, 42, 47, 48, 49, 53, 60, 62, 63], "group_axi": 40, "group_siz": [40, 42, 47, 48, 60, 61, 63], "grow": 23, "guarante": [21, 71], "guid": [0, 20, 22, 25, 43, 51, 52, 57, 64, 70], "guidelin": 23, "gz": 7, "h": [24, 61], "h5": [28, 68, 70, 76], "h5py": 26, "ha": [2, 6, 7, 13, 16, 19, 20, 21, 23, 24, 28, 31, 33, 34, 35, 36, 37, 40, 41, 46, 47, 52, 57, 60, 67, 71, 76, 77], "half": [43, 61, 65], "hand": [35, 49, 53, 59, 60, 76], "handl": [2, 3, 6, 7, 18, 40, 65, 69], "handwriten": 76, "handwritten": 76, "happen": [10, 35, 43, 49], "hard": [11, 40, 72], "harder": 21, "hardwar": [19, 20, 43, 44, 53, 54, 56, 57, 58, 59, 71, 72], "has_custom_lay": 39, "hash": 37, "have": [6, 7, 8, 18, 20, 21, 23, 24, 26, 28, 30, 31, 33, 34, 35, 37, 40, 42, 43, 46, 47, 48, 51, 52, 57, 60, 61, 62, 65, 67, 71], "hdf5": [28, 68, 70], "he": 11, "head": [25, 37, 67], "heart": 77, "heavi": 6, "height": [24, 34, 76, 77], "hello": 15, "help": [8, 23, 38, 41, 42, 43, 52, 54, 56, 57, 58, 60, 64, 67], "henc": [13, 42, 67, 70], "here": [11, 13, 19, 31, 33, 37, 40, 41, 42, 43, 44, 47, 52, 54, 57, 59, 60, 61, 67], "hesit": 23, "hessian": [46, 51], "hexcod": 78, "hf": 11, "hi": 11, "hidden": 6, "hidden_st": 61, "high": [29, 43, 48, 63, 72], "higher": [7, 16, 20, 42, 43, 46, 47, 51, 54, 60, 63, 71, 72], "highest": [7, 11], "highli": [41, 43], "highlight": [15, 20, 25, 61], 
"hint": 34, "hit": [13, 31], "honor": 40, "hood": 6, "hook": 52, "hors": [5, 78], "host": 26, "hour": 60, "hous": [29, 66], "housepric": [29, 34, 66], "how": [1, 2, 5, 7, 8, 10, 11, 13, 18, 19, 20, 21, 23, 24, 25, 26, 28, 29, 30, 31, 34, 37, 40, 41, 42, 46, 47, 51, 52, 60, 61, 63, 67, 69, 70, 71, 72, 76, 78], "howard": 26, "howev": [1, 5, 7, 8, 20, 21, 28, 31, 32, 34, 35, 37, 38, 40, 41, 42, 43, 46, 47, 48, 56, 57, 59, 60, 65, 67, 68, 71, 72], "http": [7, 8, 15, 23, 26, 70, 77], "hub": [5, 70], "hug": 67, "huggingfac": [15, 37, 42, 61], "husband": 11, "hyperparamet": 67, "hypothet": 63, "i": [0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 15, 16, 19, 21, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78], "i32": 33, "ic": 61, "id": [7, 11, 76], "idea": [23, 43, 46, 51], "ideal": 23, "ident": [2, 43], "identifi": [1, 30], "idx": [7, 8, 57, 60], "ignor": [5, 10, 37], "illustr": [13, 37, 63, 67], "imag": [1, 4, 13, 14, 21, 26, 37, 38, 39, 49, 59, 61, 70, 72, 73, 76, 77, 78], "image_arrai": 24, "image_input": [1, 7, 8, 20, 24, 26], "imageclassifi": [26, 78], "imagefeaturetyp": [34, 76], "imagefilteringmodel": 24, "imagenet": [8, 26, 28, 49, 59, 70], "imagenet1k_v2": 41, "imagenet_slim_label": 7, "imagenetlabel": [8, 26], "imagesegment": [5, 78], "imagetyp": [1, 7, 8, 13, 18, 20, 26, 31, 32, 38, 70, 72, 76, 77], "img": [7, 8, 24, 34, 72], "img_as_np_arrai": 34, "img_np": [7, 8, 34, 72], "img_path": 8, "img_tf": 7, "img_torch": 8, "impact": [46, 51, 71], "implement": [2, 6, 18, 20, 40, 42, 43, 47, 51, 57, 62], "impli": [3, 53], "implicit": [2, 33], "implicitli": 67, "import": [1, 5, 6, 7, 8, 13, 15, 16, 17, 18, 19, 20, 21, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 40, 41, 42, 47, 52, 57, 60, 61, 63, 65, 66, 67, 69, 70, 71, 72, 75, 76, 77, 78, 79], "import_graph_def": [7, 69, 72], "impos": [53, 57], "improv": [2, 16, 20, 23, 26, 34, 39, 41, 42, 53, 54, 58, 60, 63, 67], "imshow": 72, "in_channel": [21, 35], "incept": 7, "inception_preprocess": 7, "inception_v1": 7, "inception_v1_2016_08_28_frozen": 7, "inception_v1_url": 7, "inceptionv1": 7, "inceptionv1_logits_predictions_softmax": 7, "includ": [0, 2, 5, 7, 8, 10, 20, 23, 24, 25, 26, 28, 30, 34, 35, 37, 38, 39, 53, 65, 68, 73], "incorpor": [2, 8, 26], "incorrect": 40, "increas": [42, 49, 59, 60, 61, 63], "incremenet": 52, "increment": [52, 67], "inde": 72, "independ": [37, 41, 42, 71], "index": [0, 37, 48], "index_typ": [75, 77], "indic": [3, 8, 23, 46, 48, 53, 72, 77], "individu": [54, 75], "ineffici": 24, "infer": [6, 19, 20, 28, 37, 39, 40, 41, 42, 43, 54, 56, 57, 58, 59, 60, 61, 64, 67, 69, 71], "infin": 21, "influenc": 57, "info": [31, 46, 51], "inform": [1, 5, 6, 7, 8, 15, 19, 20, 21, 23, 25, 26, 27, 28, 29, 30, 32, 33, 34, 39, 40, 42, 46, 47, 52, 66, 69, 76, 78, 79], "infrequ": 21, "ingredi": 43, "inherit": [5, 10, 67], "initi": [6, 10, 11, 21, 24, 34, 41, 47, 49, 57, 67, 69, 70], "initial_spars": 60, "initializer_nod": 69, "inject": 40, "innerproduct": 76, "inp": [20, 30, 52], "inplac": [41, 47, 52, 57, 60], "input": [1, 2, 3, 4, 11, 13, 15, 19, 22, 26, 27, 28, 31, 33, 35, 36, 37, 39, 41, 42, 46, 47, 51, 52, 56, 57, 61, 62, 63, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79], "input_1": [21, 24, 26, 30], "input_2": [21, 30], "input_adpated_model_1": 37, "input_adpated_model_2": 37, "input_batch": 5, "input_binari": 69, "input_cach": [57, 61], "input_checkpoint": 69, "input_data": 30, "input_descript": [26, 
29], "input_dict": 6, "input_graph": 69, "input_id": [11, 67], "input_imag": 5, "input_lay": 15, "input_length": 6, "input_mask": 15, "input_nam": [20, 21, 24, 27, 30, 34, 75, 76, 77], "input_nod": 6, "input_ord": 19, "input_sampl": 61, "input_sav": 69, "input_shap": [15, 21, 26, 28, 34, 70, 76], "input_signatur": 70, "input_spec": [19, 33, 67], "input_tensor": [5, 69], "input_typ": 19, "input_valu": 15, "input_word": 15, "inputlay": 70, "inputspec": 19, "insert": [34, 40, 46, 47, 52, 57, 77], "insid": [2, 6, 10, 35, 46], "inspect": [6, 16, 30, 67, 69, 76], "inspect_input": 6, "inspect_input_featur": 76, "inspect_lay": 76, "inspect_loss_lay": 76, "inspect_optim": 76, "inspect_tf_output": 6, "inspect_updatable_lay": 76, "instabl": 40, "instal": [0, 5, 6, 7, 8, 10, 11, 26, 31, 39, 77], "instanc": [3, 24, 29, 30, 41, 43, 63, 67], "instanti": [10, 24, 31, 34, 35, 36], "instantli": 63, "instead": [7, 24, 30, 33, 34, 35, 39, 41, 42, 57, 71, 75], "instruct": [0, 11, 19, 20, 25, 33, 34, 39, 57, 73], "int": [24, 35, 40, 78], "int32": [10, 11, 15, 19, 32, 67], "int4": [42, 43], "int8": [40, 41, 42, 43, 47, 57, 58, 59, 60, 62, 63], "integ": [10, 32, 40, 42, 56, 58], "integr": [19, 25, 26, 34, 37, 39, 61, 63, 64], "intens": [46, 63], "intent": 35, "interact": 34, "interfac": [19, 24, 26, 29, 37, 61], "intermedi": [0, 2, 3, 13, 16, 19, 20, 58, 63, 65, 67, 71], "intern": [2, 20, 39, 67], "interpol": 58, "interv": [7, 21, 24, 26, 70], "intinputtyp": 19, "introduc": [2, 12, 13, 19, 20, 40, 52, 54, 57, 67], "introduct": [13, 35, 43, 52], "invalid": 75, "inverse_dist": [75, 77], "investig": 23, "invoc": 63, "invok": [3, 8, 24, 31, 34, 35, 36, 37, 40, 47, 61, 69], "involv": [6, 46, 61, 63, 67], "io": [13, 16, 20, 21, 24, 26, 38, 39, 42, 49, 54, 59, 61, 68, 69], "ios11": 68, "ios14": [16, 17, 68], "ios15": [16, 17, 24, 28, 32, 38, 68], "ios16": [24, 32, 48, 53, 62], "ios17": [40, 49, 54, 57, 62], "ios18": [37, 40, 42, 43, 47, 48, 49, 53, 59, 60, 62, 63, 67], "ipad": 61, "iphon": [30, 42, 43, 49, 54, 57, 58, 59, 60, 61], "ipython": [24, 75], "ir": [2, 20], "irrelev": 2, "irrespect": 71, "is_custom_op": 19, "is_symbol": 19, "is_updat": 75, "isinst": 26, "isn": [61, 67], "issu": [13, 20, 31], "isupdat": 77, "italic": 42, "item": [30, 35], "iter": [30, 41, 47, 52, 67], "its": [1, 2, 8, 10, 11, 13, 19, 24, 25, 26, 28, 29, 30, 31, 34, 36, 37, 41, 43, 51, 53, 54, 59, 61, 62, 64, 67, 69, 71, 72, 75], "itself": [2, 42, 71], "j": 78, "java": 29, "jit": [5, 8, 10, 13, 21, 24, 28, 31, 34, 36, 37, 41, 47, 57, 63, 67], "job": 25, "john": [11, 29], "join": [7, 69], "joint": [40, 43], "joint_compress": 41, "joint_compressed_mlmodel": 40, "joint_compressed_model": [40, 41], "jointli": [51, 54], "jpeg": [24, 34], "jpg": [1, 5, 7, 8, 24, 26, 34, 72], "json": [5, 78], "just": [2, 3, 6, 20, 35, 40, 43, 47, 49, 57, 63, 67, 72], "k": [7, 18, 19, 31, 42, 49, 62, 63, 65, 67, 75, 77], "k_cach": 67, "kd_tree": 75, "keep": [35, 36, 42, 43, 46, 64, 67], "keep_dim": 33, "keepdim": 10, "kei": [8, 20, 29, 30, 32, 39, 40, 46, 47, 67, 78], "kenlm": 6, "kept": 65, "kera": [3, 15, 18, 23, 24, 26, 28, 39, 68, 76], "keras_convert": 76, "keras_model": [1, 24, 26, 76], "keras_model_path": 76, "keras_url": 76, "keraslay": [15, 70], "kerasmnist": 76, "kernel": [43, 62], "kernel_s": [21, 35, 76], "keys_vector": 8, "kick": 47, "kind": [1, 2, 43, 51], "kmean": [30, 47, 61, 63, 65], "kmeans_lut": 65, "knearestneighborsclassifi": 75, "knearestneighborsclassifierbuild": [75, 77], "knn": 75, "knn_builder": 77, "knn_spec": 77, "knob": 47, "know": [3, 21, 26, 
30, 33, 51, 69, 72], "knowledg": 42, "known": [19, 51, 56, 58, 59, 61, 68], "kv_cache_st": 67, "kvcache_shap": 67, "kwarg": [19, 65], "l2": [51, 56], "label": [1, 5, 7, 26, 41, 52, 57, 60, 75, 76, 77, 78], "label_fil": 7, "label_url": [8, 26], "labels_json": [5, 78], "lack": [3, 69], "lambada_openai": 42, "lambda": 47, "lanczo": [7, 8], "landscap": 61, "languag": [0, 2, 3, 6, 12, 13, 16, 19, 20, 21, 28, 29, 51, 52, 61, 67], "larg": [18, 20, 21, 30, 34, 37, 42, 43, 46, 48, 51, 58, 61, 63, 67, 71], "large_weight": 30, "larger": [30, 42, 46, 54, 57], "largest": 61, "last": [25, 30, 35, 41, 57, 75, 76, 77], "last_weight_nam": 30, "latenc": [20, 34, 39, 41, 43, 49, 51, 53, 54, 58, 59, 63, 64], "latent": 61, "later": [21, 62, 63, 67, 76], "latest": [20, 25], "launch": [5, 15, 26, 78], "layer": [2, 6, 15, 20, 30, 31, 33, 35, 36, 37, 40, 42, 51, 52, 53, 56, 57, 61, 67, 68, 70, 71, 76, 77], "layer1": 36, "layer2": 36, "layerwis": 56, "layerwise_compress": [56, 61, 62, 63], "layerwisecompressor": [51, 52, 56, 57, 61], "layerwisecompressorconfig": [52, 57, 61], "lead": [40, 42, 43, 46, 48, 49, 51, 59, 63], "leaf": 75, "learn": [0, 2, 3, 5, 8, 13, 18, 24, 26, 29, 30, 37, 38, 39, 40, 43, 46, 51, 57, 60, 64, 68, 70, 71, 73, 78], "learningr": 76, "learnt": [42, 57], "least": 32, "leav": 67, "left": [2, 5, 26, 78], "leftmost": 1, "len": [7, 8, 26, 69, 72], "length": [6, 11, 21, 42, 48], "less": [20, 30, 34, 35, 39, 40, 42, 43, 51, 60, 64, 65], "let": [7, 8, 13, 24, 34, 42, 43, 47, 51, 57, 60, 61, 65, 67, 75, 76], "level": [2, 6, 8, 29, 33, 35, 40, 43, 47, 51, 52, 53, 54, 57, 61, 63, 70, 72], "leverag": [43, 58, 60, 64], "levl": 51, "liang": 26, "librari": [0, 2, 3, 7, 13, 25, 72], "libsvm": [0, 18, 64, 73], "libsvm_model": 27, "licens": [23, 26, 29, 75, 76, 77], "lie": 7, "life": 11, "lift": 6, "like": [2, 5, 6, 8, 18, 26, 34, 35, 40, 49, 51, 57, 63, 67, 76], "limit": [2, 5, 13, 21, 28, 40, 42, 57, 68], "line": [2, 13, 25, 31, 63, 76], "linear": [0, 2, 31, 37, 40, 42, 43, 47, 52, 53, 54, 57, 58, 63, 64, 65, 67, 75, 77], "linear1": 37, "linear2": 37, "linear_2": 40, "linear_config": [47, 52], "linear_model": [29, 66], "linear_quantize_activ": [56, 57, 60, 62, 63], "linear_quantize_weight": [41, 42, 56, 57, 62, 63], "linear_symmetr": [41, 57, 63, 65], "linear_weight_quantize_config": 41, "linearli": [54, 57, 58], "linearquant": [41, 56, 57, 60, 62], "linearquantizerconfig": [41, 57, 60], "linearregress": [29, 66], "linearsymmetr": 65, "link": [25, 34, 46, 49, 51, 54, 59, 72], "linux": [16, 25, 34, 73], "list": [8, 10, 11, 19, 23, 29, 30, 33, 39, 40, 52, 57, 60, 62, 63, 65, 67, 68, 69, 70, 76], "listinputtyp": 19, "literatur": 49, "littl": [42, 61], "ll": [36, 61], "llama": 11, "lm": 67, "lm_logit": 42, "load": [1, 3, 6, 9, 13, 15, 16, 18, 19, 20, 21, 24, 30, 33, 34, 37, 38, 41, 42, 47, 49, 54, 59, 65, 66, 69, 70, 71, 72, 78], "load_data_it": 61, "load_dataset": 42, "load_imag": 34, "load_image_as_numpy_arrai": 34, "load_model": [70, 76], "load_spec": [21, 29, 76], "load_state_dict": 67, "loaded_model": 26, "local": 7, "locat": [34, 53, 57], "log": [23, 25, 33], "log10": 72, "log_epsilon_0": 33, "logic": 35, "logit": [6, 7, 11, 41, 42, 67], "logits_sequ": 6, "logsoftmax": 41, "long": [6, 20, 34, 72], "longer": [20, 21, 31, 39, 54, 68, 72], "look": [3, 13, 16, 41, 42, 60, 61, 63, 67, 72], "lookup": [40, 41, 43, 46, 47, 48, 61, 63, 65], "loop": [6, 10, 13, 35, 36, 47, 57], "loop_bodi": 35, "loop_count": 35, "lora_alpha": 37, "lora_config": 37, "loraconfig": 37, "lose": [43, 60, 63, 65], "loss": [41, 42, 46, 
47, 51, 52, 57, 60, 61, 63, 65, 76], "loss_fct": 42, "loss_fn": [42, 47, 57], "loss_funct": 63, "losslay": 76, "lost": [56, 61], "lot": [40, 51, 54], "love": 11, "low": [37, 40, 46, 70], "lower": [7, 42, 43, 46, 48, 63, 65], "lower_bound": [21, 67], "lowest": [51, 52, 53], "lr": [41, 76], "lstm": 6, "lut": [40, 41, 46, 47, 49, 60, 61, 62, 63], "lut1": 40, "lut2": 40, "lut_1_param": 40, "lut_2_param": 40, "lut_dtyp": [41, 47], "m": [25, 42, 49, 51, 52, 53, 54, 59, 61, 67], "m1": 34, "m3": 67, "m4": [41, 43, 58, 59, 60, 62], "mac": [5, 15, 25, 26, 42, 43, 78], "macbook": [34, 67], "machin": [2, 3, 26, 29, 37, 64, 71], "maco": [10, 11, 13, 16, 24, 26, 38, 39, 42, 61, 64, 68, 73], "macos10": 68, "macos12": [16, 17, 24, 28, 32, 38, 68], "macos13": [24, 32, 40, 48, 53, 62], "macos14": [49, 62], "macos15": [37, 41, 43, 47, 48, 49, 53, 59, 62, 67], "macosx_10_12_intel": 25, "made": [26, 34, 42, 54], "magnitud": [18, 41, 51, 52, 53, 62], "magnitudeprun": [41, 52, 54, 60, 62], "magnitudeprunerconfig": [41, 52, 60], "magniutdeprun": 52, "magniutdeprunerconfig": 52, "mai": [2, 3, 7, 8, 13, 19, 20, 21, 23, 24, 26, 28, 30, 34, 37, 38, 41, 42, 43, 44, 49, 51, 54, 56, 57, 58, 59, 60, 62, 63, 65, 67, 71, 72], "main": [2, 30, 33, 35, 37, 61], "main_1": 30, "main_2": 30, "mainli": 63, "maintain": [20, 43, 51], "mainten": 68, "major": [2, 10, 20, 68, 71], "make": [1, 5, 6, 18, 20, 21, 23, 24, 27, 28, 29, 31, 33, 34, 35, 36, 43, 52, 64, 67], "make_st": 67, "makedir": 7, "man": [11, 78], "manag": [6, 24, 25], "mandatori": 19, "manhattan": 10, "mani": [2, 13, 37, 63], "manipul": 34, "manner": [10, 11, 30, 42, 51, 52, 53, 57, 62, 67], "manual": 29, "map": [30, 34, 49, 58, 72], "margin": 60, "mari": 11, "mark": [21, 26, 42, 76], "mask": [5, 51, 53], "massiv": [42, 51, 52], "master": [23, 25, 26], "match": [2, 6, 7, 19, 30, 34, 40, 61, 65, 67, 71, 72], "materi": 64, "mathemat": [2, 3, 58], "matmul": [3, 69, 70], "matplotlib": [7, 72], "matric": [42, 46, 47, 48, 53], "matrix": [3, 42, 46, 48, 51], "matter": 43, "max": [7, 11, 31, 34, 58, 67, 72, 76], "max_error": 30, "max_pool2d": 36, "max_seq_len": 67, "max_seq_length": 15, "max_sequence_length": 11, "max_signal_energi": 72, "max_time_step": 6, "maxim": 71, "maximum": [5, 21, 47], "maximum_sequence_length": 15, "maxpooling2d": 76, "mb": [3, 19, 33, 60, 61, 67], "md": [22, 23, 25], "mean": [5, 6, 8, 24, 30, 35, 40, 41, 42, 49, 51, 57, 59, 61, 62, 63, 65, 69, 71, 76, 77], "meaning": 51, "measur": [34, 42, 57, 60, 61], "median": [49, 54, 59], "meet": [51, 60], "mel": 6, "member": [19, 23], "memori": [2, 6, 13, 21, 26, 34, 41, 42, 43, 46, 49, 53, 54, 58, 59, 63, 64], "menglong": 26, "mention": [49, 52, 54, 59, 62], "menu": 23, "merg": [25, 37], "merge_chunks_to_pipelin": 30, "messag": [2, 8, 26], "meta": 11, "metadata": [1, 15, 16, 18, 28, 76, 77, 78], "metadata_vers": 40, "metal": 2, "method": [0, 7, 8, 10, 11, 13, 16, 20, 24, 25, 26, 28, 29, 30, 32, 34, 40, 41, 42, 44, 47, 49, 52, 56, 57, 60, 62, 63, 64, 67, 68, 69, 70, 71, 72, 73, 75, 76], "metric": [30, 49, 59, 63, 72, 76], "mfcc": 6, "mid": 61, "mid_block": 61, "might": 10, "mil": [16, 20, 22, 26, 32, 40, 68], "mileston": [47, 57, 60], "million": 10, "millisecond": 43, "milspec": 68, "min": [11, 31, 42, 58, 76], "mind": 46, "minibatchs": 76, "miniconda": [7, 25, 72], "minim": [29, 51, 56, 63, 64], "minimum": [2, 16, 21, 38, 43, 47, 67, 71], "minimum_conv_kernel_channel": 65, "minimum_conv_weight_count": 65, "minimum_deployment_target": [16, 17, 24, 28, 32, 37, 38, 40, 41, 47, 57, 63, 67, 68], "minor": 61, "mint": 
61, "minut": [34, 42, 43, 60, 63], "mismatch": 71, "miss": [20, 24, 69], "mistral": 67, "mistral7b": 67, "mit": [75, 76, 77], "mix": [10, 71], "mkdtemp": 69, "ml": [1, 3, 4, 5, 6, 9, 12, 14, 15, 17, 19, 22, 23, 27, 28, 30, 31, 32, 34, 35, 36, 37, 40, 42, 43, 48, 49, 51, 53, 56, 58, 60, 61, 65, 66, 69, 70, 74, 75, 77, 78, 79], "ml_model": 30, "mlcomputeunit": [28, 71], "mlcustomlay": 19, "mlfeaturetyp": 34, "mlfeaturevalu": 34, "mlmodel": [2, 3, 5, 6, 7, 8, 10, 11, 15, 16, 20, 21, 23, 24, 26, 27, 28, 31, 32, 33, 34, 35, 37, 39, 40, 41, 47, 61, 63, 65, 66, 67, 68, 69, 70, 75, 76, 77, 78, 79], "mlmodel_1": 37, "mlmodel_2": 37, "mlmodel_compress": 63, "mlmodel_compressed_activ": 63, "mlmodel_from_export": [5, 8], "mlmodel_from_trac": [5, 8], "mlmodel_palett": 41, "mlmodel_palettized_with_8bit_lut": 41, "mlmodel_prun": 41, "mlmodel_pruned_palett": 41, "mlmodel_pruned_quant": 41, "mlmodel_updat": [75, 76], "mlmodel_updatable_path": [75, 76], "mlmodel_url": 76, "mlmodelc": [2, 34], "mlmodelconfigur": [28, 34], "mlmultiarrai": [8, 18, 32, 34, 38, 69, 70], "mlpackag": [2, 5, 7, 8, 13, 15, 16, 20, 21, 24, 26, 30, 34, 37, 43, 57, 60, 61, 62, 63, 68, 69, 70, 78], "mlpackg": 34, "mlprogram": [2, 16, 21, 24, 28, 30, 32, 37, 38, 48, 53, 57, 62, 65, 67, 68, 69, 70], "mlupdatetask": 74, "mnist": [47, 63, 76], "mnistdigitclassifi": 76, "mobil": [8, 11], "mobilebert": [13, 31], "mobilenet": [8, 13, 24, 26, 28, 31, 69, 70], "mobilenet_v1_1": 28, "mobilenet_v2": [8, 13, 28], "mobilenet_v2_050_192": 70, "mobilenet_v2_1": [28, 69], "mobilenetv2": [13, 18, 24, 26, 46, 49, 51, 54, 56, 59, 69], "mobilenetv3": [46, 49, 51, 54], "mobilevitv2": [49, 56, 59], "mod": 47, "mode": [20, 30, 40, 41, 42, 43, 47, 48, 49, 51, 57, 58, 59, 60, 61, 62, 63, 65, 68], "model": [0, 4, 12, 14, 22, 23, 27, 29, 38, 39, 43, 44, 46, 48, 51, 53, 54, 56, 58, 62, 64, 65, 66, 68, 71, 73, 75, 79], "model_4bit_palettized_with_8bit_quantized_lut": 41, "model_8bit": 65, "model_compress": 52, "model_config": 63, "model_dir": 69, "model_expected_input_shap": 34, "model_fp16": 65, "model_fp32": [65, 72], "model_from_export": 13, "model_from_tf": [19, 28], "model_from_torch": 28, "model_from_trac": 13, "model_input_nam": 72, "model_output_nam": 72, "model_path": 30, "model_pb2": 77, "model_spec": 76, "model_torch_pruned_and_palett": 41, "model_torch_pruned_and_quant": 41, "model_util": 47, "model_with_lut_weight": 40, "model_with_quantized_weight": 40, "model_with_sparse_weight": 40, "modern": 61, "modif": 61, "modifi": [29, 30, 34, 63, 67], "modul": [5, 10, 13, 21, 24, 31, 35, 36, 37, 40, 47, 49, 51, 52, 61, 67, 75], "module_name_config": [47, 57], "module_type_config": [47, 52, 57], "moduledkmpalettizerconfig": 60, "modulelinearquantizerconfig": [41, 57, 60], "modulemagnitudeprunerconfig": [41, 60], "moduleposttrainingpalettizerconfig": [41, 61], "modulesparsegptconfig": 61, "moment": 11, "momentum": 76, "monterei": [10, 11], "moon": 24, "more": [1, 2, 5, 6, 7, 8, 10, 11, 16, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 37, 40, 41, 42, 43, 46, 47, 49, 51, 52, 53, 54, 57, 58, 59, 60, 63, 65, 66, 67, 68, 71, 72, 76, 78, 79], "most": [2, 3, 6, 25, 26, 28, 33, 34, 37, 39, 40, 42, 43, 46, 47, 56, 61, 63, 70, 71, 72, 75], "mostli": 59, "motorbik": [5, 78], "mountain": 61, "move": [13, 31, 39, 46, 49, 59, 60, 62, 63], "movement": 43, "mpsgraph": 20, "msg": 19, "much": [23, 28, 34, 37, 42, 43, 51, 60, 61, 63, 67, 76], "mul": 3, "multi": [18, 19, 67], "multiarrai": [20, 30, 32, 34, 78], "multiarraytyp": [76, 77], "multidimension": [24, 32, 
70], "multifunct": [0, 39], "multifunctiondescriptor": 37, "multipl": [3, 21, 22, 33, 37, 42, 48, 51, 54, 63, 78], "multipli": [7, 51], "must": [2, 8, 19, 21, 23, 24, 32, 41, 67, 76], "my": [15, 20], "my_input_nam": 32, "my_model": [16, 27, 30, 79], "my_model_chunk1": 30, "my_model_chunk2": 30, "my_model_chunked_pipelin": 30, "my_output_nam": 32, "my_updated_model": 30, "mylayerselector": 65, "mymodel": [20, 21, 30], "mymodel_upd": 21, "n": [6, 7, 40, 46, 48, 49, 51, 52, 53, 54, 56, 59, 69, 72], "n_bit": [40, 41, 42, 47, 60, 61, 63], "n_class": 41, "n_m_ratio": 52, "n_step": 6, "name": [1, 2, 3, 5, 6, 7, 8, 10, 11, 15, 19, 21, 24, 25, 26, 27, 28, 29, 30, 31, 33, 37, 40, 46, 47, 49, 51, 52, 54, 56, 57, 59, 65, 67, 69, 72, 76, 77, 79], "namespac": 62, "nativ": [40, 71], "natur": 61, "navig": [5, 26], "nbit": [30, 41, 43, 47, 61, 63, 65], "ne": [20, 28, 34, 38, 42, 43, 57, 59, 60, 62, 64, 71, 72], "nearest": [18, 42, 56, 57, 74], "nearest_neighbor": [75, 77], "necessari": [21, 31, 61, 67], "need": [3, 6, 7, 8, 16, 19, 20, 21, 23, 24, 25, 26, 28, 30, 31, 33, 34, 35, 36, 39, 40, 41, 42, 43, 47, 52, 54, 57, 59, 60, 61, 63, 64, 67, 69, 71, 72, 76, 78], "neighbor": [18, 74], "neither": 38, "net": 26, "netron": 39, "network": [0, 4, 6, 7, 8, 9, 14, 16, 18, 19, 24, 28, 31, 33, 34, 35, 36, 37, 39, 47, 54, 56, 58, 59, 60, 64, 68, 69, 70, 72, 73, 74, 77, 78], "neural": [0, 4, 6, 7, 8, 9, 14, 16, 18, 19, 24, 28, 30, 33, 34, 35, 39, 41, 42, 43, 49, 54, 56, 57, 58, 59, 60, 63, 64, 68, 69, 70, 72, 73, 74, 77, 78], "neural_network": [21, 62, 65, 76], "neuralnetwork": [2, 17, 21, 26, 28, 29, 62, 65, 68, 71], "neuralnetwork_spec": 76, "neuralnetworkbuild": 76, "neuralnetworklay": 65, "neuralnetworkshap": 39, "never": 57, "new": [0, 2, 3, 4, 6, 7, 10, 13, 19, 20, 23, 34, 37, 40, 43, 47, 63, 64, 67, 68, 69, 72], "new_feature_nam": 30, "new_state_c": 6, "new_state_h": 6, "newer": [2, 5, 10, 11, 13, 17, 20, 25, 26, 28, 31, 32, 39, 41, 43, 47, 48, 53, 54, 58, 59, 60, 68, 71, 72, 78], "newest": [20, 23, 25, 26, 38, 39], "newli": [6, 7, 13, 26, 31, 40, 67, 72], "newly_computed_k": 67, "newly_computed_v": 67, "newmodel_from_export": 13, "newmodel_from_trac": 13, "next": [8, 10, 11, 34, 37, 42, 51, 56, 57, 60, 61, 67], "next_token_predictor": 10, "nll_loss": [41, 47], "nn": [5, 10, 13, 21, 24, 31, 35, 36, 37, 41, 42, 47, 57, 67, 70], "no_grad": [5, 57, 60], "node": [2, 3, 19], "nois": 72, "noise_var": 72, "non": [21, 26, 34, 40, 41, 47, 48, 52, 53, 57, 62, 67, 76], "none": [5, 6, 10, 21, 25, 30, 34, 41, 47, 52, 57, 69, 72, 75], "nop": 7, "nor": 38, "norm": [13, 51, 52, 56, 57, 60], "normal": [8, 24, 26, 28, 47, 48, 57, 70], "notat": 3, "note": [20, 21, 28, 33, 40, 41, 42, 49, 51, 54, 57, 59, 60, 62, 67], "notebook": 47, "noth": [30, 31, 63], "noutput": 7, "now": [5, 6, 7, 8, 10, 11, 13, 20, 26, 30, 34, 38, 40, 41, 42, 47, 60, 67, 68, 71, 75, 77], "np": [6, 7, 8, 10, 11, 15, 21, 24, 32, 33, 34, 37, 67, 69, 70, 72], "nprospect": 69, "nsampl": 42, "null": [6, 47, 57], "num_channel": 35, "num_epoch": [52, 60], "num_error": 30, "num_iter": 67, "num_kmeans_work": 42, "num_palettization_epoch": 47, "number": [6, 10, 18, 21, 29, 30, 37, 42, 47, 48, 49, 54, 58, 59, 60, 61, 65, 76], "number_of_dimens": [75, 77], "number_of_neighbor": 75, "number_of_neighbors_allowed_rang": 75, "number_of_neighbors_allowed_set": 75, "numberofneighbor": 75, "numer": [2, 7, 30, 40, 42, 44, 67, 69, 71], "numpi": [5, 7, 8, 10, 11, 15, 21, 24, 26, 30, 32, 33, 34, 37, 67, 69, 70, 72], "o": [7, 23, 31, 43, 44, 47, 60, 61, 62, 69], "object": [2, 3, 5, 
6, 10, 16, 18, 20, 21, 26, 28, 29, 30, 31, 34, 36, 46, 47, 49, 60, 68, 69, 70], "observ": [1, 42, 43, 57, 58, 59, 60], "obtain": [6, 28, 42, 49, 51, 56, 57], "occupi": [20, 65], "occur": [3, 34, 54, 75], "off": [6, 42, 43, 47, 57, 60, 63, 72, 76, 77], "offer": [6, 13, 16, 20, 30, 39, 43, 51], "offic": 61, "offici": 20, "offset": [42, 58, 62, 63], "often": [2, 26, 42, 63], "oil": 61, "old": [7, 20, 72], "old_feature_nam": 30, "older": [16, 17, 20, 26, 28, 68], "omit": [21, 47, 67], "onc": [8, 11, 23, 35, 41, 42, 43, 47, 51, 56, 57, 60, 61, 67, 69], "one": [2, 6, 8, 10, 11, 20, 21, 24, 25, 29, 31, 33, 34, 36, 37, 43, 47, 51, 59, 60, 61, 62, 65, 67, 70, 71, 76], "onecomponent16half": 24, "ones": [6, 13, 31, 47, 63, 69], "onli": [2, 3, 5, 6, 19, 20, 23, 24, 26, 28, 30, 31, 34, 35, 36, 37, 40, 41, 42, 43, 46, 47, 49, 52, 53, 56, 57, 58, 59, 60, 61, 62, 63, 67, 68, 71, 72], "onnx": [12, 13, 20, 23, 39], "onnx_coreml": 39, "onward": 57, "op": [2, 7, 13, 19, 22, 26, 30, 31, 33, 40, 47, 52, 54, 56, 65, 67, 68, 69, 71, 72], "op_config": [41, 47, 52, 57, 61, 63], "op_name_config": [30, 47, 52], "op_typ": 30, "op_type_config": [30, 47, 52], "opactivationlinearquantizerconfig": [57, 63], "opaqu": 67, "open": [1, 7, 8, 12, 13, 15, 16, 18, 21, 23, 26, 34, 39, 72], "openelm": [11, 13, 31], "oper": [2, 4, 6, 7, 8, 13, 20, 24, 25, 31, 33, 36, 41, 42, 46, 47, 51, 57, 58, 65, 69, 70, 72, 73, 75, 77], "oplinearquantizerconfig": [41, 57, 63], "opmagnitudeprunerconfig": [41, 52], "oppalettizerconfig": [30, 41, 47, 61, 63], "opportun": 21, "opset": 2, "opt": [30, 44], "optforcausallm": 42, "opthresholdprunerconfig": 52, "optim": [7, 8, 10, 13, 19, 22, 28, 30, 32, 33, 34, 35, 38, 39, 40, 41, 43, 44, 46, 47, 49, 51, 52, 54, 56, 57, 58, 59, 64, 65, 67, 71, 76], "optimization_hint": [21, 34], "optimizationconfig": [30, 41, 47, 52, 57, 61, 63], "option": [0, 5, 16, 19, 20, 21, 22, 26, 30, 32, 33, 36, 40, 42, 43, 47, 52, 57, 59, 62, 73, 76], "orang": 23, "order": [7, 10, 19, 22, 26, 28, 30, 32, 33, 40, 44, 46, 47, 51, 60, 61, 68, 70], "org": [7, 8, 26, 70, 78], "origin": [2, 5, 7, 8, 10, 11, 24, 26, 29, 30, 35, 40, 48, 51, 56, 57, 63, 67, 68, 69, 72], "original_gdef": [7, 72], "other": [2, 6, 10, 13, 21, 23, 24, 25, 26, 34, 35, 36, 40, 42, 43, 44, 47, 53, 57, 59, 60, 65, 67, 69, 71, 73, 76], "otherwis": [35, 36, 38, 49, 51, 54, 59], "our": [15, 31, 36, 47, 60, 76], "out": [1, 5, 8, 20, 26, 29, 30, 39, 41, 43, 47, 51, 52, 54, 60, 62, 63, 64, 69, 75], "out_adpated_model_1": 37, "out_adpated_model_2": 37, "out_channel": [21, 35], "out_dict": [1, 24, 26, 34], "outer": [10, 58], "outlier": 42, "output": [1, 2, 4, 5, 6, 7, 10, 11, 13, 15, 18, 21, 22, 26, 27, 33, 35, 37, 39, 42, 47, 48, 51, 56, 57, 58, 61, 63, 67, 69, 70, 71, 73, 75, 76, 77, 78, 79], "output_1": 21, "output_2": 21, "output_descript": [26, 29], "output_dict": 34, "output_dir": 30, "output_graph": [6, 69], "output_id": 11, "output_nam": [20, 30, 69, 75, 76, 77], "output_node_nam": 69, "output_path": 77, "output_predict": 5, "output_strid": 78, "output_text": 11, "outsid": [2, 75], "over": [2, 3, 10, 13, 24, 30, 31, 35, 40, 42, 43, 49, 54, 60, 63, 67, 71, 72], "overal": [40, 71], "overhead": [32, 42], "overlai": 5, "overlaid": 5, "overrid": [16, 19, 28, 32, 38, 68], "overridden": 19, "overview": [3, 20, 22, 28, 30, 44, 45, 50, 55, 60, 63, 65], "own": [2, 18, 32, 58], "pack": [19, 53], "packag": [2, 6, 7, 8, 13, 21, 23, 24, 26, 29, 30, 37, 39, 64, 65, 69, 70, 73, 77], "pad": 35, "page": [0, 10, 20, 23, 25, 39, 43, 46, 47, 51, 63, 64, 65, 70, 71, 73, 78], "pair": 
67, "palett": [0, 5, 18, 20, 30, 39, 43, 49, 51, 53, 54, 58, 62, 63], "palettization_config": [41, 47], "palettization_config_dict": 47, "palettization_scal": 40, "palettize_config": 61, "palettize_weight": [30, 41, 42, 46, 47, 60, 61, 62, 63], "palettized_coreml_model": 47, "palettized_model": [60, 63], "palettized_torch_model": [40, 47], "palettized_weight": 30, "palettizer_config": 41, "pallet": 60, "palm": 24, "palmtre": 24, "palmtrees_256_by_256": 24, "palmtrees_result": 24, "panda": [29, 66], "pane": [5, 15, 26, 78], "paper": [26, 52, 56], "paradigm": 56, "parallel": 42, "param": [5, 78], "paramet": [2, 5, 7, 8, 13, 16, 17, 18, 19, 20, 21, 22, 24, 26, 28, 29, 30, 32, 33, 34, 35, 37, 38, 40, 41, 42, 43, 46, 47, 48, 52, 57, 63, 65, 67, 68, 69, 73, 75, 76, 78], "parameter_nam": 40, "params_json": 78, "params_name_map": 30, "parent": 69, "park": 61, "pars": [29, 69], "parsefromstr": [7, 69, 72], "part": [10, 19, 35, 76, 77], "parti": [33, 40, 64], "partial": 10, "particular": [2, 3, 25, 37, 43, 60], "partit": 71, "pascal": 5, "pass": [2, 3, 4, 5, 7, 10, 19, 20, 24, 25, 26, 28, 30, 35, 36, 41, 42, 47, 51, 56, 57, 58, 60, 62, 67, 70, 72, 73], "pass_pipelin": [22, 40], "passag": 15, "passpipelin": 40, "past": [15, 23], "past_kv_len": 67, "path": [2, 7, 13, 20, 21, 28, 29, 31, 34, 38, 41, 43, 57, 68, 69, 70, 71, 76], "pattern": [41, 51], "pb": [6, 7, 20, 28, 68, 69, 72], "pd": [29, 66], "peft": 37, "per": [5, 24, 40, 41, 43, 47, 49, 54, 56, 57, 58, 59, 60, 61, 62, 63], "per_block": [42, 57, 58], "per_channel": [42, 52, 57, 58], "per_grouped_channel": [42, 46, 47, 48, 49, 60, 61, 63], "per_kernel": 52, "per_scalar": 52, "per_tensor": [41, 42, 47, 48, 58], "percentag": [30, 42], "percentil": [51, 52], "perf_count": [34, 67], "perform": [2, 6, 8, 10, 11, 13, 19, 20, 25, 26, 28, 33, 34, 36, 40, 41, 42, 45, 46, 47, 50, 51, 52, 53, 55, 56, 57, 58, 60, 63, 64, 67, 70, 71, 72], "period": [13, 31], "perl": 29, "perm": 33, "permit": 21, "permut": 33, "perplex": 42, "persist": [34, 67], "person": [5, 26, 74, 78], "perturb": 46, "photo": 24, "photograph": 61, "pi": 70, "pick": [20, 28, 32, 40, 71], "pil": [1, 5, 7, 8, 24, 26, 34, 72], "pil_img": 34, "pillow": [7, 26, 72], "pip": [5, 6, 7, 8, 10, 11, 23, 25, 26, 31, 72], "pipelin": [2, 18, 20, 25, 26, 29, 30, 43, 47, 61, 63, 64, 66, 74], "pipeline_spec": 77, "pipelineclassifi": 77, "pixel": [5, 7, 8, 18, 21, 24], "place": [2, 19, 30, 34, 41, 46, 64, 67, 77], "placehold": [7, 21, 69], "platform": [26, 61], "pleas": [13, 20, 23, 24, 26, 31, 47, 54, 59], "plot": [5, 72], "plt": 72, "plu": [35, 43], "plug": 47, "png": 24, "point": [7, 20, 35, 37, 40, 41, 42, 46, 48, 56, 57, 58, 65, 72, 78], "polynomialdecayschedul": [52, 60], "pool_siz": 76, "pooled_output": 15, "poor": 42, "popular": 26, "portion": [2, 10, 71], "pose": 18, "poseestim": 78, "posefind": 78, "posenet": 78, "posenet_model": 78, "posenet_with_preview_typ": 78, "posenetmobilenet075s16fp16": 78, "posit": [2, 6, 21, 33, 67], "possibl": [3, 6, 19, 21, 23, 28, 31, 35, 36, 38, 43, 54, 59, 60, 75, 76, 77], "post": [6, 42, 43, 46, 49, 52, 57, 59, 60, 61, 64], "postprocess": 6, "posttrainingpalett": [41, 42, 46, 47, 60, 62, 63], "posttrainingpalettizerconfig": [41, 42, 47, 60, 61, 63], "posttrainingquant": [42, 56, 57, 62], "posttrainingquantizerconfig": [42, 57], "potenti": [34, 67], "pottedpl": [5, 78], "pow": 70, "power": [20, 39, 43, 64], "practic": [21, 26, 30, 32, 37, 46, 51, 61, 69, 72], "pre": [6, 7, 8, 13, 18, 20, 24, 26, 28, 43, 46, 49, 51, 56, 59, 63, 64, 72, 78], "precis": [1, 2, 7, 20, 30, 
32, 43, 46, 48, 54, 56, 60, 61, 63, 65], "pred": 6, "predefin": 26, "predetermin": [18, 20, 32], "predict": [0, 1, 5, 6, 10, 11, 15, 20, 21, 24, 28, 30, 33, 37, 49, 59, 63, 64, 69, 70, 71, 75, 76, 77, 78], "predict_with_coreml": 8, "predicted_feature_nam": 76, "predictedfeaturenam": 77, "predictedprobabilitiesnam": 77, "prediction_dict": 10, "prediction_model": 15, "prefer": 34, "prepar": [8, 41, 47, 52, 57, 60, 63], "prepare_qat_fx": 57, "preprocess": [1, 5, 13, 18, 26, 69], "preprocess_for_ev": 7, "presenc": 51, "present": [26, 28, 34, 43], "preserv": [30, 42, 54, 56, 63, 71], "pressur": 59, "pretrain": [5, 8, 13, 42, 60], "preview": [0, 5, 15, 24, 26, 29], "previou": [2, 5, 6, 7, 8, 10, 16, 21, 24, 25, 26, 28, 33, 34, 40, 41, 52, 59, 62, 67, 72, 75], "previous": [23, 26, 67], "previous_state_c": 6, "previous_state_h": 6, "price": [29, 66], "primari": [13, 43, 64], "primarili": [49, 54, 63], "print": [1, 3, 6, 7, 10, 11, 16, 20, 21, 26, 29, 30, 33, 34, 41, 67, 69, 72, 77], "print_funct": 7, "prior": [20, 24, 40, 43], "privat": 64, "pro": [34, 42, 43, 49, 54, 57, 58, 59, 60, 62, 67], "prob": 6, "probabl": [1, 6, 7, 11, 30, 35, 75, 76, 77], "problem": [23, 27, 51], "proce": 23, "process": [6, 7, 8, 13, 23, 26, 28, 30, 34, 37, 38, 39, 40, 42, 44, 46, 47, 53, 58, 61, 63, 64, 65, 67, 71, 75], "processor": 43, "produc": [2, 5, 6, 7, 10, 13, 16, 18, 24, 28, 30, 32, 33, 34, 35, 37, 38, 40, 41, 42, 51, 61, 62, 65, 67, 71, 72, 76], "product": 23, "prog": [33, 67], "program": [4, 5, 7, 8, 9, 13, 15, 17, 19, 20, 21, 28, 32, 67, 68, 69, 70, 73], "programmat": [2, 26], "progress": [7, 23], "project": [2, 5, 7, 8, 25, 26], "prompt": [34, 42, 61], "promptli": 23, "pronounc": 42, "propag": [6, 31], "properli": [10, 23], "properti": [16, 19, 20, 30, 32, 65, 71, 75], "prospect": 69, "proto": [2, 29, 30, 68, 76, 77], "protobuf": [2, 20, 28, 29, 30, 33, 69], "provid": [1, 2, 5, 6, 8, 16, 17, 19, 20, 23, 24, 25, 26, 28, 30, 34, 42, 44, 46, 47, 49, 51, 52, 53, 58, 60, 63, 64, 65, 67, 68, 69, 70, 71, 76, 77, 78], "prune": [0, 18, 20, 39, 40, 41, 43, 53, 54, 58, 62, 63], "prune_config": [41, 61], "prune_weight": [41, 51, 52, 60, 62], "pruned_model": [41, 60], "pruned_quant_model": 41, "pruned_torch_model": 40, "pruner": [41, 52, 60, 61], "pruning_config": 41, "pruning_schedul": 60, "pt": [28, 36, 68], "ptp_config": 61, "ptq": 56, "public": [26, 29, 78], "publicli": 23, "pull": 23, "purpos": [21, 61], "put": 19, "putalpha": 5, "putpalett": 5, "py": [6, 7, 26, 75], "pyplot": 72, "python": [2, 6, 7, 8, 18, 19, 20, 24, 26, 29, 33, 39, 47, 61, 64, 67, 69, 72, 73], "python_coreml_stable_diffus": 61, "pytorch": [0, 2, 16, 19, 21, 23, 25, 30, 31, 33, 34, 35, 36, 38, 42, 43, 49, 52, 60, 61, 62, 63, 64, 73, 78], "q": [31, 67], "q_len": 67, "qa": [15, 78], "qat": [56, 63], "qint": 40, "qint8": 41, "qualifi": 52, "qualiti": 42, "quant_config": 41, "quant_finalized_model": 41, "quant_max": 47, "quant_min": 47, "quant_model": 41, "quantiti": 43, "quantiz": [0, 18, 23, 30, 39, 43, 46, 47, 51, 52, 53, 54, 59, 61, 62, 63], "quantization_granular": 52, "quantization_mod": 65, "quantization_n_bit": 40, "quantization_scal": 40, "quantization_schem": [41, 52, 57, 60], "quantization_util": [62, 65], "quantizationgranular": 58, "quantize_activ": 47, "quantize_spec_weight": 39, "quantize_weight": [39, 65], "quantized_model": [57, 60, 65], "quantized_torch_model": 40, "quantizedlayerselector": 65, "queri": [20, 67], "query_length": 67, "question": [15, 20, 23, 42], "quick": [34, 43, 63, 77], "quickdraw": 77, "quicker": 60, "quickest": 
60, "quickli": [43, 51], "quickstart": 24, "quint": 40, "quint8": 57, "quit": 60, "r": [5, 6, 37, 60], "rais": [19, 21, 75], "rand": [8, 13, 21, 24, 28, 31, 33, 34, 36, 37, 41, 57, 69, 70], "randint": [10, 41, 67], "randn": [35, 41], "random": [5, 8, 10, 13, 21, 24, 33, 34, 37, 41, 69, 70], "random_norm": 70, "random_token": 10, "randomli": [31, 36], "rang": [5, 8, 10, 11, 18, 20, 24, 35, 41, 47, 52, 56, 58, 60, 67, 72, 75], "range_shap": 21, "rangedim": [10, 21, 31, 67], "rank": [8, 37, 40], "rather": [20, 24, 26, 33, 34, 53], "ratio": [30, 42, 43, 46, 49, 51, 52, 54, 56, 59, 72], "raw": [6, 8, 25], "rb": [7, 69, 72], "re": [5, 21, 30, 35, 67], "reach": 11, "read": [7, 8, 26, 40, 42, 43, 64, 67, 69, 72], "read_csv": [29, 66], "read_stat": 67, "readabl": 29, "readi": [24, 34], "readlin": 7, "readm": 25, "real": 43, "realist": 61, "realiz": 40, "realli": 43, "reason": [8, 13, 24, 37, 43], "receiv": 68, "recent": [25, 39, 75], "recip": [24, 49], "reclaim": 61, "recognit": [6, 18, 69], "recommend": [12, 13, 20, 25, 31, 39, 40, 43, 57, 59, 63, 68, 69, 72], "reconstruct": 51, "record": 40, "recov": [46, 56], "red": [23, 24], "red_bia": 24, "reduc": [18, 20, 32, 33, 39, 42, 43, 49, 53, 54, 57, 58, 59, 60, 61, 64, 65, 71], "reduce_axes_0": 33, "reduce_keep_dims_0": 33, "reduce_mean": 33, "reduce_sum": 3, "reduct": [42, 51], "refer": [2, 3, 22, 24, 26, 27, 30, 32, 33, 41, 43, 47, 48, 52, 53, 54, 57, 58, 59, 61, 64, 65, 66, 67, 79], "refin": 61, "regain": [60, 61, 63], "regex": 52, "regim": 42, "regist": 40, "register_buff": [40, 67], "register_op": 19, "register_tf_op": [3, 19], "register_torch_op": 3, "registr": 19, "regnet_y_128fg": 34, "regnet_y_128gf": 34, "regress": [30, 37, 67], "regressor": [18, 37, 66], "regularli": 3, "rel": [34, 42], "relat": [6, 40], "releas": [23, 25, 62, 67, 72], "relev": 30, "reliabl": [25, 42], "reload": 30, "relu": [33, 35, 36, 37, 70, 76], "remain": [6, 34, 52], "remov": [6, 8, 26, 64, 77], "renam": [7, 18, 24, 32], "rename_featur": [20, 24, 30, 32], "render": [61, 77], "repeat": [1, 10, 40], "replac": [41, 52, 69], "replace_custom_layer_nam": 39, "repo": [20, 22, 25, 61], "report": [13, 23, 31, 46, 49, 51, 60], "repositori": [6, 20, 23, 25], "repres": [2, 3, 6, 13, 18, 19, 21, 24, 29, 31, 34, 40, 41, 42, 43, 46, 47, 48, 49, 53, 58, 65, 68, 72], "represent": [13, 20, 22, 26, 33, 38, 40, 43, 48, 52, 53, 54, 64, 67], "repro": 23, "reproduc": 23, "request": [7, 8, 23, 26], "requir": [3, 12, 19, 21, 23, 24, 25, 29, 30, 40, 41, 42, 43, 46, 47, 51, 56, 57, 61, 63, 67, 71, 72, 76, 78], "rerun": 6, "resampl": 8, "research": [7, 26], "reset_default_graph": [7, 72], "reshap": [6, 8, 20, 34, 69], "reshape_1": 69, "reshapefrequ": 21, "resiz": [1, 5, 7, 8, 24, 26, 34, 72], "resize_to": 34, "resnet": [13, 31], "resnet34": [46, 49], "resnet50": [41, 44, 46, 49, 51, 54, 56, 59], "resolut": 21, "resolv": [3, 23], "respect": [2, 8, 49, 51, 52, 54, 57, 59, 60, 71], "respond": 23, "respons": [23, 64, 71], "rest": [10, 21], "restore_al": 69, "restore_op_nam": 69, "restrict": [7, 8, 34, 71], "result": [2, 5, 6, 7, 8, 13, 18, 23, 24, 30, 31, 34, 35, 36, 37, 41, 43, 44, 53, 57, 60, 67, 69, 71, 75, 78], "ret": 65, "ret_shap": 19, "retain": [46, 56, 60], "retriev": [7, 26, 69, 78], "return": [3, 5, 6, 8, 10, 19, 21, 24, 29, 30, 31, 33, 34, 35, 36, 37, 42, 57, 61, 63, 65, 67, 70, 72, 75], "return_dict": 11, "reus": [31, 36, 67], "revert": 75, "review": 23, "rewritten": 24, "rfind": 7, "rgb": [24, 34], "rgba": 5, "right": [2, 5, 7, 8, 18, 25, 26, 43, 69, 72, 77, 78], "rigor": 31, "rmse": 30, 
"roughli": [13, 31, 37], "round": [42, 56, 57, 58, 63], "row": 42, "rtn": [56, 57], "rtol": [69, 70], "rule": 65, "run": [3, 5, 6, 7, 8, 20, 21, 22, 24, 25, 28, 30, 31, 33, 34, 35, 36, 37, 40, 41, 42, 43, 46, 47, 49, 54, 59, 61, 64, 67, 69, 70, 71, 72], "runtim": [2, 20, 21, 22, 24, 40, 41, 44, 49, 53, 58, 59, 60, 62, 63, 65, 67, 71], "safest": 65, "sai": [23, 30, 60], "same": [2, 3, 5, 6, 7, 8, 10, 11, 23, 24, 26, 28, 34, 35, 37, 40, 41, 42, 46, 47, 48, 49, 51, 57, 67, 71, 72], "sampl": [6, 15, 23, 24, 26, 30, 31, 36, 40, 41, 42, 46, 51, 56, 60, 61, 63, 71, 72, 78], "sample_data": [57, 63], "sandler": 26, "sanit": 21, "save": [2, 5, 7, 8, 12, 13, 15, 18, 20, 21, 22, 27, 28, 30, 31, 34, 36, 37, 40, 41, 43, 53, 54, 57, 65, 66, 67, 68, 69, 70, 71, 72, 75, 76, 77, 78, 79], "save_multifunct": 37, "save_spec": [29, 77], "savedmodel": [18, 28, 68, 70], "saver": 69, "scalabl": 2, "scalar": [33, 48], "scale": [7, 8, 20, 24, 26, 40, 47, 56, 57, 58, 59, 62, 63, 65, 70], "scale_1": 40, "scale_2": 40, "scaled_dot_product_attent": [31, 67], "scaler_spec": 30, "scenario": [3, 30, 34, 37, 41, 73], "scene": 33, "schedul": [52, 60], "schema": 40, "scheme": [40, 43, 58, 65, 71], "scikit": [0, 18, 29, 64, 73], "score": [5, 8, 42, 75, 76, 77], "score_valu": 8, "scorer": 6, "scorer_path": 6, "scratch": [31, 33, 36, 70], "script": [6, 13, 23, 25, 28, 36, 47], "scripted_model": [10, 35], "scripter": 36, "scroll": 25, "sdxl": 61, "seamlessli": [2, 40], "search": 0, "seat": 72, "seattl": 29, "sec": 34, "second": [3, 6, 34, 40, 42, 46, 63, 67, 77], "section": [1, 4, 8, 16, 20, 21, 33, 35, 36, 39, 40, 41, 43, 52, 56, 57, 60, 61, 62, 63, 65, 67, 71, 72, 73], "see": [0, 1, 2, 3, 5, 7, 8, 10, 11, 13, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 42, 43, 46, 47, 49, 51, 52, 57, 59, 60, 61, 63, 64, 65, 66, 68, 69, 70, 72, 73, 74, 75, 76, 78, 79], "seed": [42, 60, 61], "seen": 59, "seg_imag": 5, "segment": [2, 6, 12, 13, 18, 30], "segment_id": 15, "segmentationmodel_no_metadata": 78, "segmentationmodel_no_metadata_from_export": 5, "segmentationmodel_no_metadata_from_trac": 5, "segmentationmodel_with_metadata": [5, 30, 78], "select": [5, 8, 18, 26, 32, 35, 49, 54, 57, 59, 71], "selector": 65, "self": [5, 10, 19, 21, 23, 24, 31, 35, 36, 37, 65, 67, 70, 75], "selu": 3, "semant": 5, "send": 23, "sens": 35, "sensit": [30, 42, 49, 54, 59, 60, 62, 63, 65, 71], "sentenc": [11, 15], "sentence_2": 10, "sentence_frag": 10, "separ": [2, 10, 16, 26, 35, 37], "seq2seq": 6, "seq_len": 67, "seqlen": 42, "sequenc": [2, 6, 10, 11, 33, 37, 42, 67], "sequence_length": 11, "sequence_output": 15, "sequenti": [30, 56, 61, 76], "seri": [2, 6, 33], "serial": [2, 7], "serv": 42, "sess": [7, 69, 72], "session": [7, 26, 37, 64, 67, 69, 72], "session_config": 72, "set": [2, 7, 15, 18, 19, 20, 29, 30, 33, 34, 41, 42, 46, 47, 48, 51, 52, 53, 56, 57, 60, 61, 65, 67, 68, 69, 71, 72, 77, 78], "set_categorical_cross_entropy_loss": 76, "set_epoch": 76, "set_glob": 60, "set_index_typ": 75, "set_multiarray_ndshape_rang": 21, "set_number_of_neighbors_with_bound": 75, "set_op_nam": 30, "set_op_typ": 30, "set_sgd_optim": 76, "set_titl": 72, "setter": 7, "setup": [60, 76], "sever": [2, 24, 31, 34, 35, 36, 39, 42, 43, 46, 47, 49, 51], "sgd": [41, 76], "sgdoptim": 76, "sgdparam": 76, "sh": 25, "shader": 2, "shape": [1, 2, 4, 5, 6, 7, 8, 10, 13, 15, 19, 24, 26, 28, 31, 33, 34, 35, 36, 37, 40, 41, 42, 48, 57, 67, 69, 70, 72, 73, 76, 77], "share": [23, 37, 42, 48, 57, 58], "sharp": 61, "shave": 43, "sheep": [5, 78], 
"shell": 25, "shift_label": 42, "shift_logit": 42, "ship": 43, "short": [2, 6, 19, 26], "short_descript": [26, 29], "shortdescript": [75, 76, 77], "shot": [51, 52], "should": [7, 8, 19, 20, 21, 23, 25, 30, 34, 35, 36, 40, 42, 43, 52, 54, 56, 65, 77], "shouldn": 65, "show": [1, 3, 5, 7, 8, 11, 19, 21, 24, 25, 26, 29, 30, 33, 34, 37, 47, 48, 52, 67, 69, 72, 78], "shown": [2, 3, 8, 13, 16, 17, 20, 21, 24, 26, 28, 30, 31, 33, 34, 36, 37, 40, 41, 48, 61, 67, 71, 75], "shutil": 34, "side": [2, 5, 26, 78], "sidestep": 5, "signal": 72, "signal_energi": 72, "signific": [42, 46, 63], "significantli": [2, 71], "silu": 31, "similar": [8, 19, 20, 23, 26, 40, 42, 47, 48, 61, 67, 71], "similarli": [40, 49, 63, 72], "simpl": [10, 18, 24, 31, 36, 52, 65, 69, 76, 77], "simpleattent": 67, "simpleattentionwithkeyvaluecach": 67, "simplefilt": 5, "simplenet": 36, "simpli": [20, 31, 37, 47, 52, 63, 76], "simplic": 6, "simplifi": [2, 22, 58, 67], "simul": [3, 46, 56, 57, 60], "sinc": [2, 6, 7, 11, 16, 19, 24, 28, 31, 32, 34, 35, 37, 42, 46, 49, 51, 57, 59, 60, 61, 63, 65, 68, 69, 70, 71, 72], "singl": [2, 21, 22, 30, 33, 35, 37, 43, 48, 58, 63, 69, 71, 72, 78], "sink": 2, "sink_op": 69, "size": [1, 5, 6, 8, 11, 18, 19, 21, 24, 26, 29, 30, 31, 34, 36, 37, 40, 41, 42, 43, 47, 49, 51, 52, 53, 54, 59, 61, 63, 65, 66, 72, 75], "sketch": [18, 77], "ski": 61, "skip": [16, 25, 30, 35, 37, 52, 54, 60, 61, 65, 72], "skip_layer_typ": 65, "skip_special_token": 11, "sklearn": [29, 66], "skm": 47, "skmpalett": [42, 46, 47, 62, 63], "skmpalettizerconfig": [42, 47, 63], "sky": 61, "slice": 6, "slight": [43, 61], "slightli": [57, 60, 72], "slim": [7, 26], "slow": 59, "slowdown": 59, "slower": [13, 21, 31], "small": [3, 11, 35, 36, 42, 43, 46, 49, 51, 54, 63], "smaller": [30, 42, 43, 47, 60, 63], "smallest": [11, 53], "smith": [11, 29], "snapshot": 67, "snippet": [18, 19, 21, 24, 28, 29, 30, 31, 32, 34, 36, 41, 42, 60, 61, 69, 72], "snr": 72, "so": [2, 6, 8, 13, 15, 16, 19, 20, 21, 23, 24, 26, 28, 30, 31, 34, 35, 40, 53, 54, 57, 60, 61, 65, 67, 71, 72, 76], "soc": 41, "sofa": [5, 78], "soft": 46, "softmax": [7, 70, 76], "softwar": [23, 25, 71], "sole": 63, "solv": 51, "some": [2, 7, 10, 20, 21, 28, 35, 40, 41, 42, 43, 46, 47, 57, 61, 62, 65, 67, 69, 71, 78], "someth": [8, 63], "sometim": 59, "somewher": 69, "sonoma": 34, "sophist": 2, "sort": [19, 51], "sourc": [2, 3, 7, 9, 15, 20, 21, 26, 28, 32, 33, 34, 39, 40, 71, 73, 75], "source_model": [16, 17, 20, 24, 32, 71], "source_torch_model": 32, "space": [20, 34, 39, 40, 61, 64, 65, 77], "spars": [41, 42, 43, 46, 52, 53, 54, 60, 61, 62], "sparse_gpt_config": 52, "sparse_model": 61, "sparse_palettized_model": 61, "sparse_weight": 30, "sparsegpt": 62, "sparsif": 52, "sparsifi": [53, 62], "sparsiti": [30, 51, 52, 53, 54, 60, 61, 63], "spatial": 72, "spec": [16, 18, 20, 21, 30, 68, 72, 75, 77], "spec_valu": 75, "special": [1, 2, 10, 30, 34, 37, 40, 71], "specializationstrategi": 34, "specif": [2, 4, 7, 19, 21, 23, 24, 30, 34, 36, 37, 38, 42, 43, 44, 46, 47, 61, 63, 67, 71, 72, 77], "specifi": [2, 7, 8, 10, 17, 19, 20, 21, 22, 23, 24, 26, 27, 28, 30, 31, 32, 33, 37, 38, 41, 42, 47, 48, 52, 57, 60, 70, 71], "specificationvers": 77, "speech": [6, 18, 69], "speed": [20, 34, 41, 42, 49, 54, 67], "speedup": 60, "spent": 42, "splitlin": [8, 26], "spot": [60, 72], "sqrt": 70, "squar": [29, 46, 67, 76], "squeez": [8, 72], "squeezellm": [42, 46], "src_function_nam": 37, "stabilityai": 61, "stabl": [13, 25, 30, 34, 61], "stable_diffusion_version_stabilityai_st": 61, "stablediffus": 44, "stack": [2, 
6, 20, 56], "stackoverflow": [7, 72], "stage": [6, 23, 37, 57], "stai": 60, "stand": 78, "standard": [5, 6, 8, 23, 24, 43, 46, 49, 51], "star": 77, "stark": 42, "start": [0, 1, 6, 8, 13, 14, 24, 31, 32, 33, 34, 37, 38, 40, 41, 42, 46, 47, 48, 49, 51, 52, 53, 57, 60, 61, 63, 64, 67, 77, 78], "state": [4, 6, 13, 31, 39, 42, 49, 54, 59, 60], "state1": 67, "state2": 67, "state_dict": 67, "stateless": 67, "statement": [3, 5, 15], "statetensorspec": 67, "statetyp": [31, 67], "static": [19, 20, 32, 42], "statist": [49, 54, 57, 59, 60], "statu": [13, 23, 31], "std": [5, 8, 24], "stddev": 69, "steel": 61, "step": [1, 3, 5, 6, 7, 8, 12, 13, 15, 21, 23, 25, 26, 28, 29, 31, 34, 36, 37, 39, 41, 46, 47, 52, 57, 60, 61, 63, 64, 67, 69, 70, 72, 77, 78], "still": [2, 20, 21, 23, 26, 28, 30, 31, 34, 35, 40, 46, 49, 51, 52, 60, 61, 68], "stochast": 76, "storag": [7, 8, 20, 26, 39, 40, 53, 54, 64, 65], "store": [2, 15, 24, 34, 40, 42, 48, 53, 54, 58, 67, 76], "stori": 61, "str": 7, "straight": 57, "straightforward": 2, "strategi": [43, 67, 76], "strict": 67, "strictli": 64, "string": [3, 24, 26, 30, 54, 76, 78], "stringinputtyp": 19, "strip": 6, "stroke": 77, "strongli": [2, 71], "structur": [35, 51, 52, 53, 54], "studi": 51, "style": [21, 37, 61, 71, 72], "styliz": [24, 72], "sub": 42, "subgraph": 6, "submit": [18, 23], "submodel": 37, "submodul": 46, "subplot": 72, "subsect": 40, "subsequ": [2, 21, 34, 67], "subset": [42, 46, 51], "subspac": 62, "substanti": 46, "subwai": 10, "succe": 40, "suddenli": 11, "suffici": [2, 16, 46, 51, 52, 56, 72], "suggest": 56, "suit": 43, "suitabl": [11, 72], "sum": [67, 72], "summar": [16, 32, 47, 60, 61, 68], "summat": 3, "sunni": 61, "super": [5, 10, 19, 21, 31, 35, 36, 37, 65, 67, 70], "support": [3, 10, 13, 16, 20, 29, 31, 33, 34, 35, 40, 43, 46, 47, 48, 51, 58, 61, 62, 63, 65, 67, 71], "supportvectorregressor": 29, "sure": [3, 20, 23, 26, 34, 52, 67], "svm": 2, "svm_paramet": 27, "svm_problem": 27, "svm_train": 27, "svmutil": 27, "sweet": 60, "swift": [2, 18, 28, 34, 71], "sy": 7, "symbol": [19, 57], "symmetr": [20, 40, 41, 56, 57, 60, 65], "system": [6, 7, 10, 16, 24, 25, 26, 34, 71, 72], "t": [3, 6, 13, 17, 19, 21, 23, 28, 30, 32, 33, 34, 35, 36, 42, 61, 65, 67, 78], "t5": 3, "t_start": 67, "tab": [1, 5, 15, 21, 24, 25, 26, 37, 49, 54, 59, 75, 78], "tabl": [16, 40, 41, 42, 43, 46, 47, 48, 49, 51, 59, 60, 61, 62, 63, 65, 68, 78], "tailor": 33, "take": [5, 6, 8, 10, 11, 13, 18, 20, 26, 28, 32, 33, 34, 40, 41, 42, 43, 46, 47, 52, 53, 56, 60, 61, 62, 63, 67, 69, 71, 72, 77], "taken": [34, 42, 60, 61], "talk": 46, "tall": 61, "tanh": 70, "tar": 7, "tarfil": 7, "target": [16, 17, 19, 24, 26, 28, 30, 32, 37, 38, 40, 41, 47, 48, 51, 53, 57, 60, 63, 67, 76], "target_function_nam": 37, "target_modul": 37, "target_spars": [41, 52, 60, 61], "task": [6, 25, 37, 42, 49, 59, 63], "team": 23, "techniqu": [6, 20, 33, 39, 41, 43, 44, 53, 54, 56, 60, 61, 63], "technologi": 64, "tell": [23, 76], "temb": 61, "tempfil": 69, "templat": 23, "temporari": 34, "tend": 42, "tensor": [2, 3, 5, 6, 7, 10, 11, 16, 19, 24, 28, 31, 32, 33, 35, 36, 40, 41, 42, 46, 47, 48, 49, 52, 53, 57, 58, 60, 61, 62, 65, 67, 69, 72], "tensorflow": [0, 2, 3, 8, 16, 20, 21, 23, 25, 26, 32, 33, 34, 38, 64, 73, 78], "tensorflow1": [7, 72], "tensorflow_hub": [15, 70], "tensorinputtyp": 19, "tensorspec": [33, 67, 70], "tensortyp": [5, 6, 8, 10, 13, 21, 24, 28, 31, 32, 34, 35, 37, 41, 57, 67, 69], "tenth": 47, "term": [6, 53, 63, 65, 76], "termin": [25, 35], "test": [5, 10, 11, 13, 15, 21, 23, 26, 30, 31, 33, 34, 41, 43, 
65, 67, 69, 70, 72, 78], "testconvmodul": 21, "text": [2, 6, 8, 10, 11, 15, 34, 37, 61], "textencod": 61, "tf": [3, 6, 7, 18, 20, 23, 24, 26, 28, 32, 68, 69, 70, 72, 77], "tf1": [20, 23, 33], "tf2": [20, 23], "tf_alia": 19, "tf_frozen": 69, "tf_graph": 69, "tf_hub": [15, 70], "tf_input_nam": 7, "tf_keras_model": 70, "tf_model": [6, 15, 19, 20, 24, 28, 69, 70], "tf_model_path": 7, "tf_op_registri": [3, 19], "tf_out": [7, 69, 70], "tf_out_tensor": 72, "tf_output_nam": 7, "tfcoreml": 39, "tfdistilbertformaskedlm": 15, "tfhub": [15, 70], "tft5model": 3, "than": [2, 13, 17, 19, 21, 24, 26, 28, 30, 31, 33, 34, 35, 37, 42, 46, 47, 51, 52, 53, 57, 59, 60, 61, 63, 65, 67, 68], "thei": [2, 19, 23, 24, 40, 47, 48, 57, 59, 63, 71], "them": [2, 5, 6, 7, 11, 13, 18, 22, 23, 24, 25, 26, 30, 31, 33, 34, 37, 40, 43, 46, 57, 63, 67, 68, 71, 76, 78], "therebi": [20, 34, 40, 48, 53, 54], "therefor": [2, 8, 10, 19, 24, 26, 28, 34, 51, 59, 72], "thi": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 16, 19, 20, 21, 23, 24, 25, 26, 28, 29, 30, 31, 33, 34, 35, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78], "third": [33, 40, 64], "those": [6, 10, 30, 37, 54, 57, 63], "though": [47, 63], "three": [2, 6, 8, 24, 29, 37, 43, 46, 51, 53, 58, 63, 67, 72], "threshold": [30, 40, 51, 52], "through": [2, 5, 8, 13, 23, 31, 35, 36, 39, 52, 56, 57, 58, 60, 61, 64], "throughput": 59, "thu": [35, 42, 46], "thumb": 23, "ti": [2, 71], "tick": 34, "time": [6, 7, 10, 11, 13, 18, 20, 31, 35, 41, 42, 43, 46, 47, 49, 51, 54, 59, 60, 63, 67, 72], "timestep": 61, "tini": 77, "tinydrawingclassifi": 77, "tinydrawingembed": 77, "tip": 37, "titl": 26, "tmp": 6, "to_typ": 30, "togeth": 48, "toi": 13, "token": [10, 15, 42, 67], "token_id": 67, "token_predictor": 10, "tokenized_prompt": 11, "tokenized_train_data": 42, "tolist": 11, "too": [47, 61], "tool": [5, 6, 7, 8, 11, 12, 13, 14, 15, 16, 17, 18, 21, 22, 23, 24, 26, 29, 31, 32, 33, 37, 40, 42, 51, 56, 58, 61, 65, 67, 68, 69, 70, 75, 76, 77], "top": [1, 6, 8, 19, 26, 33, 35, 49, 59, 60, 61], "top_3_indic": 8, "top_3_indices_coreml": 8, "topic": [1, 33, 41, 75], "topk": 19, "topkv2": 19, "torch": [3, 5, 10, 11, 13, 28, 31, 34, 35, 36, 37, 42, 46, 49, 51, 54, 56, 57, 59, 60, 61, 62, 63, 67], "torch2coreml": 61, "torch_dtyp": [11, 42], "torch_kvcache_output": 67, "torch_model": [5, 8, 11, 13, 24, 28, 34, 47, 61, 67], "torch_model_kvcach": 67, "torch_op_registri": 3, "torch_out": [8, 10], "torch_out_np": 8, "torch_output": 67, "torch_predict": 5, "torchscript": [8, 10, 24, 28, 35, 36, 68], "torchvis": [5, 12, 13, 18, 24, 28, 34, 41, 49, 59, 60], "torchvision_mobilenet_v2": 28, "total": [13, 30, 31], "totensor": 5, "tour": 43, "toymodel": 67, "toymodelwithkeyvaluecach": 67, "trace": [3, 12, 13, 18, 21, 28, 32, 34, 37, 40, 41, 47, 57, 63, 67], "traceabl": 57, "traceback": 75, "traced_model": [5, 8, 13, 21, 24, 28, 34, 41, 47, 57, 67], "traced_model_kvcach": 67, "traced_palettized_model": 63, "traced_token_predictor": 10, "tracer": [5, 10, 13, 24, 36], "track": 67, "trade": [42, 43, 60, 63], "tradeoff": 46, "train": [1, 2, 5, 6, 7, 8, 10, 12, 13, 18, 24, 26, 28, 29, 30, 31, 34, 35, 36, 41, 42, 43, 46, 49, 54, 58, 59, 60, 61, 64, 66, 71, 72, 76, 77, 78, 79], "train_data": 42, "train_dataload": 52, "train_load": 60, "train_step": [47, 52, 60], "trainabl": [15, 70], "training_util": 47, "traininginput": [75, 76, 77], "transcript": 6, "transfer": [21, 72], "transform": [2, 5, 6, 8, 10, 11, 14, 18, 22, 24, 31, 42, 43, 
46, 51, 56, 62, 67, 71, 78], "transit": 39, "translat": [2, 3, 13, 19, 20, 31, 33], "transpar": [6, 61], "transpos": [3, 8, 33, 34], "transpose_i": 3, "transpose_perm_0": 33, "transpose_x": 3, "tree": [2, 24, 26, 61, 64], "tri": 28, "triag": 23, "trial": [46, 51], "triviaqa": 42, "troubleshoot": 20, "true": [3, 5, 6, 7, 8, 10, 11, 13, 19, 30, 34, 35, 41, 47, 52, 57, 60, 65, 69, 70, 75, 76, 77], "truncated_norm": 69, "trust_remote_cod": 11, "try": [5, 13, 20, 23, 31, 43, 60, 61, 63, 64, 65, 75], "tune": [18, 37, 42, 43, 49, 51, 52, 54, 62, 64, 76], "tupl": 5, "turn": [42, 57, 60, 71], "turquois": 23, "tutori": [13, 42, 61, 64], "tuxedo": 61, "tvm": [26, 34], "tvo": [13, 16, 39, 68], "tvormonitor": [5, 78], "tvos15": [16, 17, 24, 38, 68], "tvos16": [48, 53], "twice": 35, "two": [6, 30, 34, 35, 37, 39, 40, 42, 47, 54, 57, 60, 61, 63, 68, 72, 76], "txt": [6, 7, 8, 26], "type": [0, 1, 2, 4, 5, 6, 7, 8, 9, 13, 15, 17, 19, 20, 23, 26, 28, 29, 37, 39, 40, 42, 44, 46, 47, 52, 53, 57, 58, 62, 65, 67, 69, 70, 73, 74, 76, 77], "type_domain": 19, "type_infer": 19, "typic": [8, 23, 24, 28, 37, 43, 46, 47, 49, 51, 53, 54, 56, 57, 63, 67, 69, 70, 71], "typo": 23, "u": [5, 7, 8, 10, 13, 23, 25, 26, 42, 60, 72, 76], "ui": [26, 67], "uint8": [5, 34, 41, 47], "unabl": 61, "uncompress": [41, 42, 46, 49, 51, 63], "uncompressed_model_path": [47, 63], "uncompressed_torch_model": 63, "under": [6, 23, 26, 62, 63, 68, 78], "undergo": [2, 62], "underli": 34, "understand": [2, 8, 13, 23, 26, 67, 71], "undetermin": 21, "unet": 61, "unet2dconditionmodel": 61, "unet2dconditionmodelxl": 61, "unetmidblock2dcrossattn": 61, "unexpect": [23, 30], "unif": 2, "unifi": [1, 2, 6, 8, 13, 14, 16, 20, 24, 26, 28, 32, 33, 38, 39, 64, 68, 70, 73], "uniform": [47, 48], "uniqu": [30, 37, 40, 47], "unique_valu": 30, "unit": [2, 7, 8, 20, 43, 49, 54, 59, 61, 65, 70, 71], "unknown": [69, 77], "unless": [20, 49, 51, 54, 59], "unlik": [32, 58, 71], "unnecessari": 6, "unpreced": 34, "unsign": 40, "unsqueez": [5, 11], "unstructur": [51, 53, 54], "unsupport": [2, 3, 19, 33], "until": [10, 11], "untyp": 28, "unus": [22, 60], "unzip": 7, "up": [5, 20, 21, 23, 26, 30, 32, 34, 37, 40, 41, 42, 43, 46, 51, 52, 54, 60, 61, 63, 65, 67, 78], "up_block": 61, "upblock2d": 61, "updat": [0, 2, 6, 20, 25, 30, 37, 39, 40, 46, 47, 57, 61, 62, 67, 75], "updatableknn": 75, "updatablemnistdigitclassifi": 76, "update_step": [52, 60], "updated_model": 30, "upgrad": [20, 39], "upon": [11, 52, 56], "upper": 21, "upper_bound": [21, 67], "url": [7, 76], "urllib": [5, 7, 8, 26], "urlopen": [8, 26], "urlretriev": 7, "us": [0, 1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 22, 23, 25, 27, 28, 31, 33, 36, 37, 38, 39, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 61, 62, 63, 64, 65, 66, 68, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79], "usag": [34, 52], "use_cach": [11, 42], "usecpuonli": [7, 34, 69], "user": [2, 3, 18, 21, 25, 27, 31, 39, 40, 43, 47, 57, 64, 71, 77], "user_defined_metadata": [5, 15, 26, 78], "userwarn": 7, "usual": [3, 34, 47, 54, 56, 67, 71], "utf": 8, "utf8": 26, "util": [0, 1, 6, 19, 20, 21, 23, 28, 29, 34, 37, 39, 41, 43, 47, 52, 61, 65, 67, 76, 77], "v": [2, 31, 42, 67], "v0": 5, "v1": [7, 69], "v2": 69, "v_cach": 67, "vae": 61, "vaedecod": 61, "val": [19, 30, 33], "valid": [6, 31, 36, 46, 51, 72, 75], "valu": [5, 6, 7, 8, 11, 16, 19, 24, 26, 28, 30, 33, 34, 35, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 62, 65, 67, 68, 69, 70, 71, 76], "valueerror": [19, 75], "values_vector": 8, "var": 2, "vari": [34, 42, 43, 
44, 49, 54, 59, 60, 61, 63, 65, 71, 72], "variabl": [2, 32, 36, 69, 71], "variant": [6, 11, 15, 37, 61], "variat": 37, "varieti": [3, 29, 51, 69], "variou": [19, 32, 41, 42, 43, 56, 57, 60, 61], "vase": 8, "ve": 35, "vector": [2, 37, 40, 53, 64, 75, 77], "vehicl": 26, "venv": 25, "veri": [8, 24, 34, 35, 42, 43, 46, 61, 63], "verifi": [3, 24, 26, 34, 64, 67, 69, 72, 75], "versa": 30, "version": [2, 3, 7, 8, 10, 11, 12, 13, 15, 16, 17, 23, 24, 26, 28, 29, 31, 32, 34, 35, 42, 43, 44, 46, 47, 49, 54, 59, 60, 61, 62, 63, 67, 70, 71, 72, 77], "version_info": 7, "via": [13, 30, 31, 37, 40, 41, 52, 57, 62, 63], "vibrant": 34, "vice": 30, "video": [37, 64], "view": [6, 24, 42, 43, 61], "viewer": 39, "vision": [5, 8, 24, 26, 64], "visit": 43, "visual": [2, 34, 39], "visualize_spec": 39, "vit": [13, 31], "voc": 5, "vocab_s": 67, "w": [24, 60, 61, 69, 70], "w1": 31, "w16a16": 59, "w2": 31, "w3": 31, "w4": 41, "w8a8": [43, 57, 59, 60, 62], "w_quantiz": 58, "w_unquant": 58, "wa": [2, 8, 10, 11, 12, 17, 26, 40, 42, 52, 60, 67, 68, 72, 76, 77], "wai": [2, 13, 23, 24, 25, 26, 28, 30, 31, 34, 36, 40, 41, 42, 43, 46, 52, 60, 63, 64, 67, 70, 71], "walk": [23, 64], "want": [2, 5, 10, 19, 20, 24, 30, 35, 40, 41, 43, 57, 63, 67, 71], "warm": 61, "warn": [5, 7, 10, 21, 35], "watcho": [13, 16, 26, 39, 68], "watchos8": [16, 17, 24, 38, 68], "watchos9": [48, 53], "wav": 6, "wave": 72, "we": [5, 7, 11, 13, 20, 23, 25, 28, 30, 31, 33, 36, 40, 41, 42, 46, 47, 49, 51, 52, 56, 57, 60, 61, 67, 69, 75, 76], "web": 10, "weight": [0, 2, 6, 16, 18, 20, 26, 28, 37, 39, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 58, 59, 60, 61, 62, 63, 69, 70, 76], "weight_dtyp": [42, 52, 57], "weight_metadata": 30, "weight_metadata_dict": 30, "weight_nam": 30, "weight_quant_model_config": 63, "weight_quant_op_config": 63, "weight_scal": 40, "weight_threshold": [30, 47, 52, 57], "weighting_schem": [75, 77], "weights_dir": 30, "welcom": 23, "well": [5, 24, 26, 37, 40, 42, 43, 46, 47, 48, 51, 52, 56, 57, 58, 59, 60, 71, 72, 78], "were": [5, 35, 39, 40, 42, 46, 47, 49, 51, 54, 59, 61], "what": [0, 1, 15, 20, 23, 26, 33, 35, 37, 39, 43, 44, 48, 53, 63, 69, 75, 76], "wheel": [25, 39], "when": [2, 7, 8, 10, 13, 19, 20, 21, 23, 24, 26, 28, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 46, 47, 48, 49, 53, 54, 57, 58, 59, 63, 65, 67, 71, 72], "whenev": [3, 19, 25], "where": [31, 37, 40, 41, 42, 43, 46, 47, 48, 51, 53, 54, 57, 60, 61, 67], "wherea": [42, 60], "whether": [16, 28, 40, 43, 71], "which": [1, 2, 3, 5, 6, 7, 8, 10, 11, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 63, 64, 67, 68, 69, 71, 72, 76, 77], "whichoneof": [8, 16, 29], "while": [2, 3, 6, 10, 18, 19, 21, 24, 28, 34, 37, 38, 40, 42, 43, 46, 47, 49, 52, 53, 56, 60, 61, 63, 64, 65, 68, 71, 72, 77], "white": 77, "whiten": 24, "whl": 25, "whole": [35, 37, 48, 53, 58, 71], "whose": [22, 34, 40, 41, 43], "why": 33, "wide": [28, 34, 61, 64, 68], "width": [6, 24, 34, 76, 77], "width_multipli": 78, "wife": 11, "wikipedia": 7, "wikitext2": 42, "wise": [40, 42], "within": [7, 10, 19, 21, 26, 37, 42, 53, 60, 61, 65, 71, 75], "without": [2, 5, 6, 12, 24, 28, 40, 47, 53, 57, 60, 63, 65, 67, 68, 70, 75], "wk": 31, "wo": 31, "won": [13, 35, 36], "word": [10, 11], "work": [2, 5, 8, 11, 13, 18, 21, 24, 30, 31, 34, 43, 46, 51, 56, 60, 63, 65, 67, 69, 70, 71, 78], "workaround": [20, 37], "workflow": [0, 9, 12, 14, 18, 23, 33, 34, 40, 43, 47, 52, 56, 59, 60, 62], "world": [34, 64], "worri": 36, 
"would": [7, 21, 33, 34, 35, 37, 42, 47, 52, 53, 57, 60, 69, 71, 72], "wq": 31, "wrap": [1, 5, 34], "wrapped_typ": 67, "wrappeddeeplabv3resnet101": 5, "write": [3, 20, 33, 64, 65, 67], "write_graph": 69, "written": [57, 76], "wv": 31, "wwdc": [37, 43, 67], "www": 70, "x": [3, 5, 7, 8, 10, 19, 21, 23, 24, 27, 31, 33, 34, 35, 36, 37, 67, 69, 70, 72, 76, 77], "x_1": 67, "x_2": 67, "x_blue_channel": 24, "x_green_channel": 24, "x_red_channel": 24, "x_shape": 19, "x_type": 19, "xception": [28, 70], "xcode": [0, 2, 8, 15, 20, 21, 28, 29, 34, 37, 49, 54, 59, 67, 71], "xcrun": 2, "xgboost": [0, 18, 64, 73], "xl": 61, "y": [3, 27, 31, 35, 67, 69, 70, 72], "y_1": [37, 67], "y_2": [37, 67], "y_blue_channel": 24, "y_green_channel": 24, "y_red_channel": 24, "yaml": [47, 52, 57], "ye": 40, "yet": [2, 5], "yield": [41, 61], "you": [1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 47, 49, 51, 56, 57, 59, 60, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 78, 79], "your": [1, 2, 3, 6, 7, 8, 13, 17, 18, 19, 20, 21, 23, 24, 25, 26, 32, 33, 34, 35, 36, 37, 39, 40, 43, 44, 46, 51, 57, 59, 63, 64, 65, 67, 70, 71, 72, 77], "z": 31, "zero": [6, 11, 15, 34, 35, 40, 41, 43, 51, 52, 53, 54, 56, 57, 58, 62, 67], "zero_grad": 41, "zero_point": [40, 58], "zero_point_1": 40, "zero_point_2": 40, "zhmoginov": 26, "zhu": 26, "zsh": 25}, "titles": ["Core ML Tools", "Classifiers", "Comparing ML Programs and Neural Networks", "Composite Operators", "Conversion Options", "Converting a PyTorch Segmentation Model", "Converting a TensorFlow 1 DeepSpeech Model", "Converting a TensorFlow 1 Image Classifier", "Converting a torchvision Model from PyTorch", "Converting Deep Learning Models", "Converting a Natural Language Processing Model", "Converting an Open Efficient Language Model", "Converting from PyTorch", "PyTorch Conversion Workflow", "Converting from TensorFlow", "Converting TensorFlow 2 BERT Transformer Models", "Convert Models to ML Programs", "Convert Models to Neural Networks", "Examples", "Custom Operators", "Core ML Tools FAQs", "Flexible Input Shapes", "Graph Passes", "Contributing", "Image Input and Output", "Installing Core ML Tools", "Getting Started", "LibSVM", "Load and Convert Model Workflow", "MLModel Overview", "MLModel Utilities", "Model Exporting", "Model Input and Output Types", "Model Intermediate Language", "Model Prediction", "Model Scripting", "Model Tracing", "Multifunction Models", "New Conversion Options", "New Features", "Conversion", "Combining Compression Types", "Optimizing OPT Model", "Overview", "Examples", "Palettization", "Palettization Algorithms", "API Overview", "Palettization Overview", "Performance", "Pruning", "Pruning Algorithms", "API Overview", "Overview", "Performance", "Linear Quantization", "Quantization Algorithms", "API Overview", "Quantization Overview", "Performance", "Optimizing ResNet50 Model", "Optimizing StableDiffusion Model", "What\u2019s New", "Optimization Workflow", "What Is Core ML Tools?", "Compressing Neural Network Weights", "Scikit-learn", "Stateful Models", "Source and Conversion Formats", "TensorFlow 1 Workflow", "TensorFlow 2 Workflow", "Typed Execution", "Typed Execution Workflow Example", "Core ML Tools API Overview", "Updatable Models", "Nearest Neighbor Classifier", "Neural Network Classifier", "Pipeline Classifier", "Xcode Model Preview Types", "XGBoost"], "titleterms": {"": [5, 20, 62], "1": [6, 7, 18, 19, 28, 40, 60, 65, 68, 69, 72], "13": 
16, "16": [16, 24, 65], "2": [10, 15, 18, 19, 28, 60, 68, 70], "3": [19, 39], "32": 72, "4": [19, 20, 39], "5": 20, "6": 20, "7": [20, 62], "8": [39, 62, 65], "A": 67, "For": [13, 18, 25, 39, 65], "Into": 6, "Not": 35, "One": 6, "The": [1, 10], "With": [20, 24, 29, 63, 70], "about": [33, 34], "accumul": 67, "accur": 35, "accuraci": [51, 56], "activ": [40, 56, 57, 58, 60], "adapt": 37, "add": 24, "addit": 64, "advantag": 21, "algorithm": [42, 46, 51, 56], "all": 30, "an": [6, 11, 20, 24, 38, 41, 51, 72, 77], "api": [0, 1, 21, 47, 52, 56, 57, 62, 63, 73], "appl": 43, "appli": 76, "applic": 67, "appropri": 71, "ar": 65, "arrai": [30, 34], "asr": 6, "attent": 67, "audio": 6, "avail": [16, 21, 43, 48, 53, 62], "awar": 57, "base": [42, 52, 56, 57, 63, 76], "befor": 7, "beginn": 25, "behavior": 32, "benchmark": [49, 51, 54, 59], "benefit": 2, "bert": [15, 70], "better": 20, "bisect": 30, "bit": 65, "bodi": 78, "build": 25, "cach": 67, "calibr": [42, 52, 56, 57, 63], "can": 7, "captur": 13, "chang": 30, "channel": [42, 48, 61], "choos": [20, 51, 71], "class": [8, 19], "classifi": [1, 7, 18, 30, 75, 76, 77], "code": 23, "combin": [37, 41], "compar": [2, 8, 72], "comparison": 72, "compil": [20, 34], "composit": [3, 18, 19], "compress": [40, 41, 42, 43, 60, 61, 63, 65], "comput": [20, 28, 34, 38, 72], "conclus": 42, "concret": 70, "conda": 25, "contribut": 23, "control": 65, "convers": [0, 3, 4, 13, 18, 20, 28, 32, 38, 40, 68], "convert": [0, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 24, 26, 28, 30, 31, 33, 38, 39, 40, 47, 52, 57, 67, 69, 70, 72, 76], "core": [0, 2, 7, 8, 10, 11, 13, 20, 21, 25, 29, 33, 39, 47, 52, 57, 62, 64, 67, 68, 73], "coreml": 61, "coremltool": 20, "creat": [33, 67, 75, 76, 77], "custom": [18, 19, 20, 65], "data": [42, 52, 56, 57, 60, 63], "dataset": 63, "declar": 20, "decompos": 3, "deep": [9, 20], "deepspeech": 6, "default": [16, 20, 21, 32, 68], "defin": [19, 70, 76], "definit": 7, "deploy": [13, 39, 68], "deprec": [34, 39], "descript": [29, 30], "detail": 2, "develop": [19, 68], "differ": [2, 31, 56, 68], "differenti": [46, 47], "dimens": 21, "distilbert": 15, "document": 23, "doubl": 30, "download": [7, 8, 26, 28, 72], "dtype": 32, "dynam": [6, 21], "each": [21, 63], "effect": [42, 43], "effici": 11, "embed": [40, 77], "enabl": 21, "encod": 10, "engin": 20, "enumer": 21, "environ": 25, "error": 20, "evalu": [8, 13, 30], "exampl": [3, 18, 24, 26, 28, 29, 30, 34, 37, 44, 47, 67, 69, 72, 78], "execut": [18, 38, 71, 72], "exist": 3, "export": [8, 11, 21, 31, 69], "exportedprogram": 13, "faq": 20, "fast": 34, "faster": 20, "featur": [30, 39, 43, 48, 53, 68], "feed": 6, "file": [6, 68], "filter": 24, "find": 16, "fine": [56, 60, 63], "fix": 20, "flag": 34, "flexibl": [18, 20, 21], "float": [16, 24, 30, 65], "foat": 72, "follow": 28, "format": [65, 68, 70, 76], "fragment": 10, "framework": 64, "free": [42, 52, 56, 57, 60, 63], "frequenc": 21, "from": [8, 12, 14, 21, 25, 28, 31, 34, 39], "frozen": 69, "function": [3, 65, 70], "get": [8, 26, 30, 77], "gpt": 10, "gptq": 56, "granular": [48, 58], "graph": [7, 13, 22, 69], "grayscal": 24, "group": 61, "handl": 20, "high": 20, "hint": 21, "how": [6, 43, 58, 68], "hub": 15, "i": [20, 64], "imag": [5, 7, 8, 18, 20, 24, 32, 34, 69], "imagetyp": 24, "impact": [42, 56], "implement": 19, "import": [3, 10, 11, 24], "improv": 68, "index": 75, "info": [1, 40, 49, 59], "inform": 13, "initi": 20, "input": [5, 6, 7, 8, 10, 18, 20, 21, 24, 29, 30, 32, 34, 38, 69], "instal": [3, 23, 25, 72], "instruct": 23, "intermedi": [18, 33], 
"ios15": 62, "issu": 23, "its": 76, "jit": 35, "joint": [41, 61], "jointli": 40, "k": [46, 47], "kera": [20, 70], "kv": 67, "label": [8, 23], "languag": [10, 11, 18, 33], "last": 19, "latenc": [42, 60], "layer": [19, 65], "learn": [9, 20, 33, 34, 66], "librari": [10, 11, 64], "libsvm": 27, "limit": [31, 36], "linear": [18, 55], "load": [5, 7, 8, 26, 28, 29, 76], "lora": 37, "lower": 62, "lut": [48, 65], "maco": [25, 34], "macos12": 62, "magnitudeprun": 51, "mai": 35, "make": [7, 8, 26, 72, 76], "make_updat": 76, "mean": [46, 47], "metadata": [5, 26, 29, 30], "method": 39, "methodologi": [46, 49, 51, 54, 59], "migrat": 39, "mil": [2, 3, 18, 19, 33, 67], "minim": 60, "minimum": [13, 68], "mix": 35, "ml": [0, 2, 7, 8, 10, 11, 13, 16, 18, 20, 21, 24, 25, 26, 29, 33, 38, 39, 47, 52, 57, 62, 64, 67, 68, 71, 72, 73, 76], "mlmodel": [0, 18, 29, 30], "mlmultiarrai": 24, "mlpackag": 41, "mobilenetv2": 8, "mode": [8, 13, 56], "model": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 20, 21, 24, 26, 28, 30, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 47, 49, 52, 57, 59, 60, 61, 63, 67, 69, 70, 72, 74, 76, 77, 78], "more": [3, 13, 33, 34, 69], "multi": [21, 30, 34], "multiarrai": 24, "multifunct": 37, "name": [20, 32], "natur": 10, "nearest": [75, 77], "neighbor": [75, 77], "network": [2, 17, 20, 21, 26, 38, 65, 71, 76], "neural": [2, 17, 20, 21, 26, 38, 65, 71, 76], "new": [25, 38, 39, 62], "newer": 16, "nlp": 10, "nn": 20, "normal": 5, "note": 39, "now": 24, "number": 75, "numer": 20, "obtain": 13, "older": 39, "onli": [21, 65], "op": [3, 20], "open": [5, 11, 24, 78], "oper": [3, 18, 19, 68], "opt": 42, "optim": [0, 18, 20, 21, 42, 60, 61, 62, 63], "option": [4, 18, 24, 28, 38, 65], "origin": 13, "other": 0, "output": [8, 20, 24, 29, 30, 32, 34, 38, 72], "overview": [0, 18, 29, 33, 43, 47, 48, 52, 53, 57, 58, 73, 78], "packag": [16, 25], "palett": [40, 41, 42, 45, 46, 47, 48, 60, 61], "paramet": 72, "parti": 25, "pass": [22, 40], "per": [42, 48], "perform": [21, 43, 49, 54, 59, 68], "pick": 38, "pipelin": [40, 77], "pose": 78, "post": [47, 51, 56, 63], "posttrainingpalett": 61, "pre": [3, 69, 70], "prealloc": 21, "precis": [16, 38, 58, 71, 72], "predetermin": 21, "predict": [7, 8, 18, 26, 29, 34, 67, 72], "preprocess": [6, 7, 8, 20, 24], "prerequisit": 25, "preview": [1, 18, 78], "previou": [20, 39], "print": 8, "process": 10, "produc": [1, 26, 68], "program": [2, 16, 18, 24, 26, 33, 38, 71, 72], "prompt": 11, "protobuf": 8, "protocol": 40, "provid": [21, 32], "prune": [50, 51, 52, 60, 61], "python": [25, 34], "pytorch": [3, 5, 8, 10, 11, 12, 13, 18, 20, 24, 28, 32, 39, 40, 47, 56, 57, 67, 68], "qat": 57, "quantiz": [20, 40, 41, 42, 48, 55, 56, 57, 58, 60, 65], "queri": 23, "quick": 18, "rang": 21, "recommend": 70, "reduct": 60, "refer": [0, 21], "regist": [3, 19, 67], "regressor": 30, "releas": [20, 39], "renam": 30, "represent": 2, "requir": [5, 7, 8, 10, 11, 15, 16, 26, 31, 34], "reshap": 21, "resnet50": 60, "resort": 19, "resourc": 64, "result": [42, 46, 49, 51, 54, 59, 61], "run": [10, 11, 26], "runtim": [42, 43], "sampl": 5, "save": [16, 24, 26, 29], "scalar": [24, 61], "scale": [42, 48], "scenario": 60, "scikit": 66, "script": [10, 35], "secur": 23, "segment": [5, 78], "select": 21, "sensit": [46, 47], "sentenc": 10, "sequenti": 70, "set": [5, 6, 8, 10, 11, 13, 16, 21, 24, 25, 26, 28, 32, 38, 75, 76], "shape": [18, 20, 21, 32], "silicon": 43, "simpl": 67, "size": 60, "slightli": 7, "softwar": 62, "sourc": [23, 25, 68, 72], "spars": 40, "sparsegpt": [51, 52, 61], "sparsiti": 41, "spec": [8, 
29, 76], "specif": [29, 68], "specifi": [34, 40], "stablediffus": 61, "start": [18, 20, 26], "state": 67, "statement": 24, "static": 6, "step": 19, "subclass": 70, "summari": 60, "support": [2, 19, 24, 25, 39, 64, 68, 76], "swift": 19, "symmetr": 58, "target": [13, 39, 68], "tensor": [8, 30, 71], "tensorflow": [6, 7, 14, 15, 18, 19, 24, 28, 39, 68, 69, 70, 72], "test": [7, 8, 24], "tf": 15, "than": 20, "third": 25, "time": [34, 52], "toi": [37, 67], "token": 11, "tool": [0, 20, 25, 39, 62, 64, 73], "torch": [8, 20, 21, 24, 40, 41, 47, 52], "torchscript": 13, "torchvis": [8, 20], "trace": [5, 8, 10, 24, 31, 35, 36], "train": [3, 47, 51, 52, 56, 57, 63, 69, 70], "transform": [3, 15, 30, 70], "tree": 18, "tune": [56, 60, 63], "type": [16, 18, 24, 30, 32, 34, 38, 41, 43, 71, 72, 75, 78], "unbound": 21, "unifi": 0, "unit": [28, 34, 38], "unsupport": 20, "untyp": 71, "up": [6, 10, 11, 25], "updat": [18, 21, 29, 74, 76, 77], "upgrad": [23, 25], "us": [3, 6, 19, 20, 21, 24, 26, 29, 30, 32, 34, 35, 40, 60, 67, 69], "user": 70, "util": [18, 30], "v": 26, "valu": 75, "vari": 7, "vector": 48, "version": [20, 25, 39, 40, 68], "virtual": 25, "vision": 1, "visual": 72, "weight": [30, 40, 56, 57, 65], "what": [62, 64], "which": 65, "why": [20, 34, 35, 37], "work": [6, 29, 57, 58], "workflow": [13, 19, 28, 39, 63, 69, 70, 72], "x": 68, "xcode": [1, 5, 16, 18, 24, 26, 78], "xgboost": 79, "your": 31}}) \ No newline at end of file +Search.setIndex({"alltitles": {"API Overview": [[47, null], [52, null], [57, null]], "API Reference": [[0, null], [21, "api-reference"]], "APIs for each workflow": [[63, "apis-for-each-workflow"]], "Accuracy Benchmarks": [[51, "accuracy-benchmarks"]], "Accuracy data": [[56, "accuracy-data"]], "Activation Quantization": [[58, "activation-quantization"]], "Add Image Preprocessing Options": [[24, "add-image-preprocessing-options"]], "Additional Resources": [[64, "additional-resources"]], "Algorithm Runtime": [[42, "algorithm-runtime"], [42, "id3"]], "Applications": [[67, "applications"]], "Availability of ML Programs": [[16, "availability-of-ml-programs"]], "Availability of features": [[43, "availability-of-features"]], "Available Only for Neural Networks": [[21, null]], "Bisect Model": [[30, "bisect-model"]], "Body Pose Example": [[78, "body-pose-example"]], "Build From Source": [[25, "build-from-source"]], "Calibration data based Pruning (SparseGPT)": [[52, "calibration-data-based-pruning-sparsegpt"]], "Calibration data based compression": [[42, "calibration-data-based-compression"]], "Calibration data based quantization": [[57, "calibration-data-based-quantization"], [57, "id3"]], "Capture the PyTorch Graph": [[13, "capture-the-pytorch-graph"]], "Change Model Tensor Input/Output Types": [[30, "change-model-tensor-input-output-types"]], "Choosing Custom Names for Input and Outputs": [[20, "choosing-custom-names-for-input-and-outputs"]], "Choosing an algorithm": [[51, "choosing-an-algorithm"]], "Choosing the Appropriate Precision": [[71, "choosing-the-appropriate-precision"]], "Classifiers": [[1, null], [18, "classifiers"]], "Combining Compression Types": [[41, null]], "Combining compression types on a Torch model": [[41, "combining-compression-types-on-a-torch-model"]], "Combining compression types on an mlpackage": [[41, "combining-compression-types-on-an-mlpackage"]], "Combining models: toy example with LoRA adapters": [[37, "combining-models-toy-example-with-lora-adapters"]], "Compare the Outputs": [[72, "compare-the-outputs"]], "Comparing ML Programs and Neural Networks": [[2, 
null]], "Composite Operators": [[3, null]], "Composite and Custom Operators": [[18, "composite-and-custom-operators"]], "Compressing Neural Network Weights": [[65, null]], "Compression info protocol": [[40, "compression-info-protocol"]], "Compute Plan": [[30, "compute-plan"]], "Compute Precision Parameter": [[72, null]], "Conclusions": [[42, "conclusions"]], "Contributing": [[23, null]], "Contributions": [[23, "contributions"]], "Control Which Layers are Quantized": [[65, "control-which-layers-are-quantized"]], "Conversion": [[40, null]], "Conversion Options": [[4, null], [18, "conversion-options"], [28, null]], "Convert All Double Multi-array Feature Descriptions to Float": [[30, "convert-all-double-multi-array-feature-descriptions-to-float"]], "Convert From TensorFlow 1": [[28, "convert-from-tensorflow-1"]], "Convert From TensorFlow 2": [[28, "convert-from-tensorflow-2"]], "Convert MIL to Core ML": [[33, "convert-mil-to-core-ml"]], "Convert Models to ML Programs": [[16, null]], "Convert Models to Neural Networks": [[17, null]], "Convert PyTorch models with quantized weights and activations": [[40, "convert-pytorch-models-with-quantized-weights-and-activations"]], "Convert a Dynamic Model to a Static One": [[6, "convert-a-dynamic-model-to-a-static-one"]], "Convert a Keras Model With Subclassing": [[70, "convert-a-keras-model-with-subclassing"]], "Convert a Pre-trained Model": [[69, "convert-a-pre-trained-model"], [70, "convert-a-pre-trained-model"]], "Convert a Sequential Model": [[70, "convert-a-sequential-model"]], "Convert a TensorFlow Concrete Function": [[70, "convert-a-tensorflow-concrete-function"]], "Convert a User-defined Model": [[70, "convert-a-user-defined-model"]], "Convert and Compare with Foat 32 Precision": [[72, "convert-and-compare-with-foat-32-precision"]], "Convert from PyTorch": [[28, "convert-from-pytorch"], [39, "convert-from-pytorch"]], "Convert from TensorFlow": [[39, "convert-from-tensorflow"]], "Convert models with jointly compressed weights": [[40, "convert-models-with-jointly-compressed-weights"]], "Convert models with palettized weights": [[40, "convert-models-with-palettized-weights"], [40, "id2"]], "Convert models with sparse weights": [[40, "convert-models-with-sparse-weights"], [40, "id1"]], "Convert the DistilBERT Transformer Model": [[15, "convert-the-distilbert-transformer-model"]], "Convert the Image to a Tensor": [[8, "convert-the-image-to-a-tensor"]], "Convert the Model": [[5, "convert-the-model"], [26, "convert-the-model"]], "Convert the Model and Preprocess an Audio File": [[6, "convert-the-model-and-preprocess-an-audio-file"]], "Convert the Model to Core ML": [[10, "convert-the-model-to-core-ml"], [11, "convert-the-model-to-core-ml"]], "Convert the Model to an ML Program": [[24, "convert-the-model-to-an-ml-program"], [72, "convert-the-model-to-an-ml-program"]], "Convert the Model to the ML Model Format": [[76, "convert-the-model-to-the-ml-model-format"]], "Convert the TF Hub BERT Transformer Model": [[15, "convert-the-tf-hub-bert-transformer-model"]], "Convert to Core ML": [[7, "convert-to-core-ml"], [8, "convert-to-core-ml"], [13, "convert-to-core-ml"]], "Convert to ML Program or Neural Network": [[38, "convert-to-ml-program-or-neural-network"]], "Converting Deep Learning Models": [[9, null]], "Converting TensorFlow 2 BERT Transformer Models": [[15, null]], "Converting Torch models to Core ML": [[52, "converting-torch-models-to-core-ml"]], "Converting a BERT Transformer Model": [[70, null]], "Converting a Natural Language Processing Model": 
[[10, null]], "Converting a PyTorch Segmentation Model": [[5, null]], "Converting a TensorFlow 1 DeepSpeech Model": [[6, null]], "Converting a TensorFlow 1 Image Classifier": [[7, null]], "Converting a torchvision Model from PyTorch": [[8, null]], "Converting an Open Efficient Language Model": [[11, null]], "Converting from PyTorch": [[12, null]], "Converting from TensorFlow": [[14, null]], "Converting quantized PyTorch models to Core ML": [[57, "converting-quantized-pytorch-models-to-core-ml"]], "Converting the Palettized PyTorch Model": [[47, "converting-the-palettized-pytorch-model"]], "Converting to a Stateful Core ML Model": [[67, "converting-to-a-stateful-core-ml-model"]], "Core ML Preallocates the Default Shape": [[21, null]], "Core ML Specification": [[29, "core-ml-specification"]], "Core ML Tools": [[0, null]], "Core ML Tools 7": [[62, "core-ml-tools-7"]], "Core ML Tools 8": [[62, "core-ml-tools-8"]], "Core ML Tools API Overview": [[73, null]], "Core ML Tools FAQs": [[20, null]], "Core ML Tools Optimization APIs": [[62, "core-ml-tools-optimization-apis"]], "Core ML Tools Versions": [[20, "core-ml-tools-versions"]], "Core ML model support": [[2, null]], "Create a MIL Program": [[33, "create-a-mil-program"]], "Create an Updatable Pipeline Model": [[77, "create-an-updatable-pipeline-model"]], "Create the Base Model": [[76, "create-the-base-model"]], "Create the Classifier": [[75, "create-the-classifier"]], "Create the Nearest Neighbor Classifier": [[77, "create-the-nearest-neighbor-classifier"]], "Creating a Stateful Model in MIL": [[67, "creating-a-stateful-model-in-mil"]], "Custom LUT Function": [[65, "custom-lut-function"]], "Custom Layer Support": [[19, null]], "Custom Operators": [[19, null]], "Data free Pruning": [[52, "data-free-pruning"]], "Data-free Pruning": [[52, "id1"]], "Data-free compression": [[42, "data-free-compression"], [42, "id1"], [63, "data-free-compression"]], "Data-free quantization": [[57, "data-free-quantization"]], "Decompose into Existing MIL Operators": [[3, "decompose-into-existing-mil-operators"]], "Default Format": [[68, "default-format"]], "Define make_updatable": [[76, "define-make-updatable"]], "Deprecated Flag": [[34, null]], "Deprecated Methods and Support": [[39, "deprecated-methods-and-support"]], "Developer Workflow": [[19, "developer-workflow"]], "Difference from Tracing": [[31, "difference-from-tracing"]], "Differences in Detail": [[2, "differences-in-detail"]], "Differentiable K-Means": [[46, "differentiable-k-means"]], "Differentiable K-Means Palettization API Example": [[47, "differentiable-k-means-palettization-api-example"]], "Documentation": [[23, "documentation"]], "Download for the Following Example": [[28, null], [28, null]], "Download the Class Labels": [[8, "download-the-class-labels"]], "Download the Model": [[7, "download-the-model"], [26, "download-the-model"]], "Download the Model and Install TensorFlow 1": [[72, "download-the-model-and-install-tensorflow-1"]], "Effect of per channel scales": [[42, "effect-of-per-channel-scales"]], "Effect on Runtime performance on Apple Silicon": [[43, "effect-on-runtime-performance-on-apple-silicon"]], "Enable Unbounded Ranges": [[21, "enable-unbounded-ranges"]], "Encode the Sentence Fragment as Input": [[10, "encode-the-sentence-fragment-as-input"]], "Enumerated Shapes Provide a Performance Advantage": [[21, null]], "Enumerated Shapes with Multi-input Models": [[21, null]], "Error in Declaring Network or Computing NN Outputs": [[20, "error-in-declaring-network-or-computing-nn-outputs"]], 
"Evaluate Classifier, Regressor, and Transformer Models": [[30, "evaluate-classifier-regressor-and-transformer-models"]], "Example": [[30, "example"]], "Example: A Simple Accumulator": [[67, "example-a-simple-accumulator"]], "Example: Toy Attention Model with Stateful KV-Cache": [[67, "example-toy-attention-model-with-stateful-kv-cache"]], "Examples": [[18, null], [29, "examples"], [44, null]], "Export and Convert your Model": [[31, "export-and-convert-your-model"]], "Export as a Frozen Graph and Convert": [[69, "export-as-a-frozen-graph-and-convert"]], "Export the Model": [[11, "export-the-model"]], "ExportedProgram": [[13, "exportedprogram"]], "Exporting Limitations": [[31, null]], "Fast Predictions": [[34, "fast-predictions"]], "Feature Availability": [[48, null], [53, null]], "Feature Development": [[68, "feature-development"]], "Feed the Input Into the Model": [[6, "feed-the-input-into-the-model"]], "Find the Model Type in a Model Package": [[16, "find-the-model-type-in-a-model-package"]], "Fine-tuning based algorithm for quantizing weight and/or activations": [[56, "fine-tuning-based-algorithm-for-quantizing-weight-and-or-activations"]], "Fixing High Numerical Error": [[20, "fixing-high-numerical-error"]], "Flexible Input Shapes": [[18, "flexible-input-shapes"], [21, null]], "Float 16 Default": [[16, null]], "For Beginners": [[25, null]], "For More Information": [[13, null]], "For Neural Network Format Only": [[65, null]], "For a Quick Start": [[18, "for-a-quick-start"]], "For older deployment targets": [[39, null], [39, null]], "Format Differences": [[68, "format-differences"]], "Format Specification": [[68, "format-specification"]], "GPTQ algorithm for weight quantization (post-training data calibration)": [[56, "gptq-algorithm-for-weight-quantization-post-training-data-calibration"]], "Get Weights Metadata": [[30, "get-weights-metadata"]], "Get the Embedding Model": [[77, "get-the-embedding-model"]], "Get the protobuf spec": [[8, "get-the-protobuf-spec"]], "Getting Started": [[26, null]], "Granularity": [[48, "granularity"]], "Graph Passes": [[22, null]], "Grayscale Images and Float 16 Multiarrays": [[24, null]], "Grouped channel palettization": [[61, "grouped-channel-palettization"]], "Handling an Unsupported Op": [[20, "handling-an-unsupported-op"]], "How ASR Works": [[6, null]], "How Quantization Works": [[58, "how-quantization-works"]], "How to Compress": [[43, "how-to-compress"]], "How to Produce": [[68, "how-to-produce"]], "Image Filter Model": [[24, "image-filter-model"]], "Image Input and Output": [[24, null]], "Image Inputs": [[18, "image-inputs"]], "Image Prediction": [[34, "image-prediction"]], "Image Prediction for a Multi-array Model": [[34, "image-prediction-for-a-multi-array-model"]], "Image Preprocessing for Converting torchvision": [[20, "image-preprocessing-for-converting-torchvision"]], "ImageType Input and Output Example": [[24, "imagetype-input-and-output-example"]], "ImageType Now Supported for Output": [[24, null]], "ImageType for Input": [[24, "imagetype-for-input"]], "ImageType for Output": [[24, "imagetype-for-output"]], "Images for Input and Output": [[8, null]], "Impact on Latency": [[42, "impact-on-latency"]], "Impact on accuracy with different modes": [[56, "impact-on-accuracy-with-different-modes"]], "Import Libraries and Set Up the Model": [[10, "import-libraries-and-set-up-the-model"], [11, "import-libraries-and-set-up-the-model"]], "Import Statements": [[24, "import-statements"]], "Import and Convert the Pre-trained Model": [[3, 
"import-and-convert-the-pre-trained-model"]], "In-memory Model": [[30, "in-memory-model"]], "Input and Output Type Options": [[38, "input-and-output-type-options"]], "Input the Image and Make a Prediction": [[7, "input-the-image-and-make-a-prediction"]], "Install Core ML Tools": [[25, "install-core-ml-tools"]], "Install From Source": [[25, "install-from-source"]], "Install Third-party Packages": [[25, "install-third-party-packages"]], "Install Transformers": [[3, null]], "Install or Build Core ML Tools": [[25, "install-or-build-core-ml-tools"]], "Install/upgrade instructions": [[23, null]], "Installing Core ML Tools": [[25, null]], "Issues and queries": [[23, "issues-and-queries"]], "Joint compression": [[61, "joint-compression"]], "Joint palettization and quantization": [[41, "joint-palettization-and-quantization"], [41, "id1"]], "Joint sparsity and palettization": [[41, "joint-sparsity-and-palettization"], [41, "id3"]], "Joint sparsity and quantization": [[41, "joint-sparsity-and-quantization"], [41, "id2"]], "K-Means": [[46, "k-means"]], "Keras Conversion": [[20, "keras-conversion"]], "Labels": [[23, "labels"]], "Latency reduction with activation quantization": [[60, "latency-reduction-with-activation-quantization"]], "Latency reduction with pruning": [[60, "latency-reduction-with-pruning"]], "Learn More About Image Input and Output": [[34, null]], "Learn More about MIL": [[33, null]], "LibSVM": [[27, null]], "Linear Quantization": [[55, null]], "Load a Test Image": [[7, "load-a-test-image"]], "Load and Convert Model Workflow": [[28, null]], "Load and Save the MLModel": [[29, "load-and-save-the-mlmodel"]], "Load the Graph Definition": [[7, "load-the-graph-definition"]], "Load the MobileNetV2 Model": [[8, "load-the-mobilenetv2-model"]], "Load the Model and Image": [[5, "load-the-model-and-image"]], "Load the Spec and Apply its Settings": [[76, "load-the-spec-and-apply-its-settings"]], "Load the Test Image": [[8, "load-the-test-image"]], "ML Program Benefits": [[2, "ml-program-benefits"]], "ML Program Typed Tensors": [[71, "ml-program-typed-tensors"]], "ML Program with Typed Execution": [[18, "ml-program-with-typed-execution"]], "ML Programs and MIL": [[2, "ml-programs-and-mil"]], "ML Programs vs. 
Neural Networks": [[26, null]], "MLModel": [[0, null], [18, "mlmodel"]], "MLModel Overview": [[18, "mlmodel-overview"], [29, null]], "MLModel Utilities": [[18, "mlmodel-utilities"], [30, null]], "MagnitudePruner": [[51, "magnitudepruner"]], "Make Predictions": [[26, "make-predictions"]], "Make a Core ML Prediction": [[8, "make-a-core-ml-prediction"]], "Make a Prediction": [[72, "make-a-prediction"]], "Make a Prediction with Core ML and Print Outputs": [[8, "make-a-prediction-with-core-ml-and-print-outputs"]], "Make a Prediction with Torch and Print Outputs": [[8, "make-a-prediction-with-torch-and-print-outputs"]], "Make a PyTorch Prediction and Compare": [[8, "make-a-pytorch-prediction-and-compare"]], "Make a Source Model Prediction for Comparison": [[72, "make-a-source-model-prediction-for-comparison"]], "Make a Visual Comparison": [[72, "make-a-visual-comparison"]], "Make the Model Updatable": [[76, "make-the-model-updatable"]], "Methodology": [[46, "methodology"], [49, "methodology"], [51, "methodology"], [54, "methodology"]], "Methodology:": [[59, "methodology"]], "Migration Workflow (Core ML Tools 3 \u2192 4)": [[39, "migration-workflow-core-ml-tools-3-4"]], "Minimum Deployment Target": [[13, null], [68, "minimum-deployment-target"]], "Mix Tracing and Scripting": [[35, "mix-tracing-and-scripting"]], "Model Exporting": [[31, null]], "Model Info": [[49, "model-info"], [59, "model-info"]], "Model Input and Output Types": [[32, null]], "Model Intermediate Language": [[33, null]], "Model Intermediate Language (MIL)": [[18, "model-intermediate-language-mil"]], "Model Prediction": [[18, "model-prediction"], [34, null]], "Model Representations": [[2, "model-representations"]], "Model Scripting": [[35, null]], "Model Tracing": [[36, null]], "Model fine-tuning based compression": [[63, "model-fine-tuning-based-compression"]], "More Examples": [[3, "more-examples"], [69, "more-examples"]], "Multi-array Prediction": [[34, "multi-array-prediction"]], "Multifunction Models": [[37, null]], "Nearest Neighbor Classifier": [[75, null]], "Neural Engine With Flexible Input Shapes": [[20, "neural-engine-with-flexible-input-shapes"]], "Neural Network Classifier": [[76, null]], "Neural Network Untyped Tensors": [[71, "neural-network-untyped-tensors"]], "New Conversion Options": [[38, null]], "New Features": [[39, null]], "New in Core ML Tools 8": [[39, "new-in-core-ml-tools-8"]], "Normalize and Segment the Image": [[5, "normalize-and-segment-the-image"]], "Obtain the Original PyTorch Model": [[13, "obtain-the-original-pytorch-model"]], "Open the Model in Xcode": [[5, "open-the-model-in-xcode"], [78, "open-the-model-in-xcode"], [78, "id1"]], "Optimization": [[0, null], [18, "optimization"]], "Optimization Workflow": [[63, null]], "Optimizations for iOS15 / macOS12 and lower": [[62, null]], "Optimizing OPT Model": [[42, null]], "Optimizing ResNet50 Model": [[60, null]], "Optimizing StableDiffusion Model": [[61, null]], "Other Converters": [[0, null]], "Overview": [[0, null], [33, "overview"], [43, null], [53, null], [78, "overview"]], "Palettization": [[42, "palettization"], [45, null]], "Palettization Algorithms": [[46, null]], "Palettization Overview": [[48, null]], "Palettization using data free compression": [[60, "palettization-using-data-free-compression"]], "Palettization using fine tuning": [[60, "palettization-using-fine-tuning"]], "Palettization with PostTrainingPalettizer": [[61, "palettization-with-posttrainingpalettizer"]], "Palettizing CoreML Model": [[61, "palettizing-coreml-model"]], 
"Palettizing a Core ML model": [[47, "palettizing-a-core-ml-model"]], "Palettizing a Torch model": [[47, "palettizing-a-torch-model"]], "Per-channel scale": [[48, "per-channel-scale"]], "Performance": [[49, null], [54, null], [59, null]], "Performance Benchmarks": [[49, "performance-benchmarks"], [54, "performance-benchmarks"]], "Performance Benchmarks:": [[59, "performance-benchmarks"]], "Performance Improvements": [[68, "performance-improvements"]], "Pick the Compute Units for Execution": [[38, "pick-the-compute-units-for-execution"]], "Pipeline Classifier": [[77, null]], "Post-Training Palettization API example": [[47, "post-training-palettization-api-example"], [47, "id1"]], "Post-Training Pruning": [[51, "post-training-pruning"]], "Post-training (data calibration) activation quantization": [[56, "post-training-data-calibration-activation-quantization"]], "Post-training (data-free) weight quantization": [[56, "post-training-data-free-weight-quantization"]], "Post-training calibration data based compression": [[63, "post-training-calibration-data-based-compression"]], "Post-training data-free compression": [[63, "post-training-data-free-compression"]], "Predict From the Compiled Model": [[34, "predict-from-the-compiled-model"]], "Predictions Can Vary Slightly": [[7, null]], "Preprocess the Image Before Converting": [[7, "preprocess-the-image-before-converting"]], "Preprocess the Image Input for torchvision Models": [[8, "preprocess-the-image-input-for-torchvision-models"]], "Preprocessing for TensorFlow": [[24, "preprocessing-for-tensorflow"]], "Preprocessing for Torch": [[24, "preprocessing-for-torch"]], "Prerequisites": [[25, "prerequisites"]], "Preview a Classifier Model": [[1, "preview-a-classifier-model"]], "Preview the Model in Xcode": [[78, "preview-the-model-in-xcode"], [78, "id2"]], "Previous Versions": [[39, "previous-versions"]], "Previous releases": [[20, "previous-releases"]], "Produce a Classifier Model": [[1, "produce-a-classifier-model"]], "Produce a Neural Network": [[26, "produce-a-neural-network"]], "Provide the Shape of the Input": [[32, "provide-the-shape-of-the-input"]], "Pruning": [[50, null]], "Pruning APIs for Core ML model": [[52, "pruning-apis-for-core-ml-model"]], "Pruning APIs for Torch model": [[52, "pruning-apis-for-torch-model"]], "Pruning Algorithms": [[51, null]], "Pruning with SparseGPT": [[61, "pruning-with-sparsegpt"]], "PyTorch": [[18, "pytorch"]], "PyTorch Conversion": [[20, "pytorch-conversion"]], "PyTorch Conversion Workflow": [[13, null]], "PyTorch Formats": [[68, "pytorch-formats"]], "PyTorch quantization APIs": [[56, null]], "Quantization": [[42, "quantization"]], "Quantization Algorithms": [[56, null]], "Quantization Aware Training (QAT)": [[57, "quantization-aware-training-qat"]], "Quantization Granularity": [[58, "quantization-granularity"]], "Quantization Options": [[65, "quantization-options"]], "Quantization Overview": [[58, null]], "Quantization precision": [[58, "quantization-precision"]], "Quantize to 1-8 Bits": [[65, "quantize-to-1-8-bits"]], "Quantize to Float 16 Weights": [[65, "quantize-to-float-16-weights"]], "Quantizing the LUT": [[48, "quantizing-the-lut"]], "Quantizing weights": [[57, "quantizing-weights"], [57, "id1"]], "Quantizing weights and activations": [[57, "quantizing-weights-and-activations"], [57, "id2"]], "Recommended Format": [[70, null]], "Register the Function": [[3, null]], "Registering States for a PyTorch Model": [[67, "registering-states-for-a-pytorch-model"]], "Release Notes": [[39, "release-notes"]], 
"Rename a Feature": [[30, "rename-a-feature"]], "Requirements": [[5, "requirements"], [7, "requirements"], [8, "requirements"], [10, "requirements"], [11, "requirements"], [15, null], [15, null], [26, "requirements"], [31, "requirements"]], "Requires Xcode 13 or Newer": [[16, null]], "Reshape Frequency Optimization Hint": [[21, "reshape-frequency-optimization-hint"]], "Results": [[42, "results"], [42, "id2"], [46, "results"], [49, "results"], [51, "results"], [54, "results"], [59, "results"], [61, "results"]], "Run the Converted Core ML Model": [[10, "run-the-converted-core-ml-model"], [11, "run-the-converted-core-ml-model"]], "Run the Example": [[26, "run-the-example"]], "Run the PyTorch Model": [[10, "run-the-pytorch-model"], [11, "run-the-pytorch-model"]], "Save ML Programs as Model Packages": [[16, "save-ml-programs-as-model-packages"]], "Save and Load the Model": [[26, "save-and-load-the-model"]], "Save and Open in Xcode": [[24, "save-and-open-in-xcode"]], "Scalar palettization": [[61, "scalar-palettization"]], "Scenario 1 : Minimizing model size": [[60, "scenario-1-minimizing-model-size"]], "Scenario 2: Minimizing latency": [[60, "scenario-2-minimizing-latency"]], "Scikit-learn": [[66, null]], "Security issue": [[23, null]], "Segmentation Example": [[78, "segmentation-example"]], "Select From Predetermined Shapes": [[21, "select-from-predetermined-shapes"]], "Sensitive K-Means": [[46, "sensitive-k-means"]], "Sensitive K-Means Palettization API Example": [[47, "sensitive-k-means-palettization-api-example"]], "Set Names for PyTorch Conversion": [[32, "set-names-for-pytorch-conversion"]], "Set Up Conda": [[25, "set-up-conda"]], "Set Up a New Virtual Environment": [[25, "set-up-a-new-virtual-environment"]], "Set Up the Model": [[6, "set-up-the-model"]], "Set the Compute Precision for an ML Program": [[38, "set-the-compute-precision-for-an-ml-program"]], "Set the Compute Units": [[28, "set-the-compute-units"]], "Set the Index Type": [[75, "set-the-index-type"]], "Set the ML Program Precision": [[16, "set-the-ml-program-precision"]], "Set the Model Metadata": [[26, "set-the-model-metadata"]], "Set the Model to Evaluation Mode": [[8, null], [13, null]], "Set the Model\u2019s Metadata": [[5, "set-the-models-metadata"]], "Set the Number of Neighbors Value": [[75, "set-the-number-of-neighbors-value"]], "Set the Range for Each Dimension": [[21, "set-the-range-for-each-dimension"]], "Set the Scalar Type": [[24, "set-the-scalar-type"]], "Set the dtype": [[32, "set-the-dtype"]], "Software Availability of Optimizations": [[62, "software-availability-of-optimizations"]], "Source and Conversion Formats": [[68, null]], "Source code": [[23, null]], "SparseGPT": [[51, "sparsegpt"]], "Specify pass pipeline": [[40, "specify-pass-pipeline"]], "Specifying Compute Units": [[34, "specifying-compute-units"]], "Starting a Deep Learning Core ML Model": [[20, "starting-a-deep-learning-core-ml-model"]], "Stateful Models": [[67, null]], "Step 1: Register the MIL Operator": [[19, "step-1-register-the-mil-operator"]], "Step 2: Define a TensorFlow Composite Operator": [[19, "step-2-define-a-tensorflow-composite-operator"]], "Step 3: Convert the Model": [[19, "step-3-convert-the-model"]], "Step 4: Implement Classes in Swift": [[19, "step-4-implement-classes-in-swift"]], "Summary": [[60, "summary"], [60, "id3"]], "Support": [[76, null]], "Supported Core ML File Formats": [[68, "supported-core-ml-file-formats"]], "Supported Libraries and Frameworks": [[64, "supported-libraries-and-frameworks"]], "Supported Operations": 
[[68, "supported-operations"]], "Supported Python and MacOS Versions": [[25, null]], "Supported Source Formats": [[68, "supported-source-formats"]], "Symmetric Quantization": [[58, "symmetric-quantization"]], "Target Conversion Formats": [[68, "target-conversion-formats"]], "TensorFlow 1": [[18, "tensorflow-1"]], "TensorFlow 1 Workflow": [[69, null]], "TensorFlow 2": [[18, "tensorflow-2"]], "TensorFlow 2 Workflow": [[70, null]], "TensorFlow versions 1.x Formats": [[68, "tensorflow-versions-1-x-formats"]], "TensorFlow versions 2.x Formats": [[68, "tensorflow-versions-2-x-formats"]], "Test With an Image": [[24, "test-with-an-image"]], "The GPT-2 NLP Model": [[10, "the-gpt-2-nlp-model"]], "The Vision Classifier API": [[1, null]], "Timing Example": [[34, "timing-example"]], "Tokenize the Prompt": [[11, "tokenize-the-prompt"]], "Torch.Export Dynamism": [[21, null], [21, null]], "TorchScript": [[13, "torchscript"]], "Trace / Export the Model": [[8, "trace-export-the-model"]], "Trace and Script the Model": [[10, "trace-and-script-the-model"]], "Trace the Model with Sample Input": [[5, "trace-the-model-with-sample-input"]], "Trace the PyTorch Model": [[24, "trace-the-pytorch-model"]], "Tracing Limitations": [[36, null]], "Training time Pruning": [[52, "training-time-pruning"]], "Trees and Linear Models": [[18, "trees-and-linear-models"]], "Typed Execution": [[71, null]], "Typed Execution Workflow Example": [[72, null]], "Types of Compression": [[43, "types-of-compression"]], "Types of Inputs and Outputs": [[34, "types-of-inputs-and-outputs"]], "Unified Conversion": [[0, null]], "Updatable Models": [[18, "updatable-models"], [74, null]], "Update a Core ML Model to Use Flexible Input Shapes": [[21, "update-a-core-ml-model-to-use-flexible-input-shapes"]], "Update the Metadata and Input/Output Descriptions": [[29, "update-the-metadata-and-input-output-descriptions"]], "Upgrade Core ML Tools": [[25, "upgrade-core-ml-tools"]], "Use Custom Operators as a Last Resort": [[19, null]], "Use Image Inputs": [[69, null]], "Use Images": [[32, "use-images"]], "Use JIT Script": [[35, "use-jit-script"]], "Use a Dynamic TensorFlow Model": [[6, "use-a-dynamic-tensorflow-model"]], "Use a compiled model for faster initialization": [[20, "use-a-compiled-model-for-faster-initialization"]], "Use an ImageType": [[24, "use-an-imagetype"]], "Use an MLMultiArray": [[24, "use-an-mlmultiarray"]], "Use compression info embedded in torch models": [[40, "use-compression-info-embedded-in-torch-models"]], "Use the Default Behavior": [[32, "use-the-default-behavior"]], "Use the MLModel for Prediction": [[29, "use-the-mlmodel-for-prediction"]], "Use the Model with Xcode": [[26, "use-the-model-with-xcode"]], "Using Compiled Python Models for Prediction": [[34, "using-compiled-python-models-for-prediction"]], "Using Composite Ops with PyTorch Conversion": [[3, "using-composite-ops-with-pytorch-conversion"]], "Using States with Predictions": [[67, "using-states-with-predictions"]], "Using the Metadata": [[30, "using-the-metadata"]], "Vector Palettization": [[48, "vector-palettization"]], "Version 1": [[40, "version-1"]], "What Is Core ML Tools?": [[64, null]], "What\u2019s New": [[62, null]], "Why Combine Models?": [[37, "why-combine-models"]], "Why Tracing May Not be Accurate": [[35, "why-tracing-may-not-be-accurate"]], "Why Use a Compiled Model?": [[34, "why-use-a-compiled-model"]], "Why optimize.torch is better than PyTorch\u2019s default quantization": [[20, "why-optimize-torch-is-better-than-pytorchs-default-quantization"]], "With 
calibration dataset": [[63, "with-calibration-dataset"]], "With fine-tuning": [[63, "with-fine-tuning"]], "Work With the Spec": [[29, "work-with-the-spec"]], "Working with Core ML Models": [[57, "working-with-core-ml-models"]], "Working with PyTorch Models": [[57, "working-with-pytorch-models"]], "XGBoost": [[79, null]], "Xcode Info": [[1, null]], "Xcode Model Preview Types": [[18, "xcode-model-preview-types"], [78, null]], "coremltools 4": [[20, "coremltools-4"]], "coremltools 5": [[20, "coremltools-5"]], "coremltools 6": [[20, "coremltools-6"]], "coremltools 7": [[20, "coremltools-7"]], "macOS Required for Model Prediction": [[34, null]]}, "docnames": ["index", "source/classifiers", "source/comparing-ml-programs-and-neural-networks", "source/composite-operators", "source/conversion-options", "source/convert-a-pytorch-segmentation-model", "source/convert-a-tensorflow-1-deepspeech-model", "source/convert-a-tensorflow-1-image-classifier", "source/convert-a-torchvision-model-from-pytorch", "source/convert-learning-models", "source/convert-nlp-model", "source/convert-openelm", "source/convert-pytorch", "source/convert-pytorch-workflow", "source/convert-tensorflow", "source/convert-tensorflow-2-bert-transformer-models", "source/convert-to-ml-program", "source/convert-to-neural-network", "source/coremltools-examples", "source/custom-operators", "source/faqs", "source/flexible-inputs", "source/graph-passes-intro", "source/how-to-contribute", "source/image-inputs", "source/installing-coremltools", "source/introductory-quickstart", "source/libsvm-conversion", "source/load-and-convert-model", "source/mlmodel", "source/mlmodel-utilities", "source/model-exporting", "source/model-input-and-output-types", "source/model-intermediate-language", "source/model-prediction", "source/model-scripting", "source/model-tracing", "source/multifunction-models", "source/new-conversion-options", "source/new-features", "source/opt-conversion", "source/opt-joint-compression", "source/opt-opt1_3", "source/opt-overview", "source/opt-overview-examples", "source/opt-palettization", "source/opt-palettization-algos", "source/opt-palettization-api", "source/opt-palettization-overview", "source/opt-palettization-perf", "source/opt-pruning", "source/opt-pruning-algos", "source/opt-pruning-api", "source/opt-pruning-overview", "source/opt-pruning-perf", "source/opt-quantization", "source/opt-quantization-algos", "source/opt-quantization-api", "source/opt-quantization-overview", "source/opt-quantization-perf", "source/opt-resnet", "source/opt-stable-diffusion", "source/opt-whats-new", "source/opt-workflow", "source/overview-coremltools", "source/quantization-neural-network", "source/sci-kit-learn-conversion", "source/stateful-models", "source/target-conversion-formats", "source/tensorflow-1-workflow", "source/tensorflow-2", "source/typed-execution", "source/typed-execution-example", "source/unified-conversion-api", "source/updatable-model-examples", "source/updatable-nearest-neighbor-classifier", "source/updatable-neural-network-classifier-on-mnist-dataset", "source/updatable-tiny-drawing-classifier-pipeline-model", "source/xcode-model-preview-types", "source/xgboost-conversion"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst", "source/classifiers.md", 
"source/comparing-ml-programs-and-neural-networks.md", "source/composite-operators.md", "source/conversion-options.rst", "source/convert-a-pytorch-segmentation-model.md", "source/convert-a-tensorflow-1-deepspeech-model.md", "source/convert-a-tensorflow-1-image-classifier.md", "source/convert-a-torchvision-model-from-pytorch.md", "source/convert-learning-models.rst", "source/convert-nlp-model.md", "source/convert-openelm.md", "source/convert-pytorch.rst", "source/convert-pytorch-workflow.md", "source/convert-tensorflow.rst", "source/convert-tensorflow-2-bert-transformer-models.md", "source/convert-to-ml-program.md", "source/convert-to-neural-network.md", "source/coremltools-examples.md", "source/custom-operators.md", "source/faqs.md", "source/flexible-inputs.md", "source/graph-passes-intro.md", "source/how-to-contribute.md", "source/image-inputs.md", "source/installing-coremltools.md", "source/introductory-quickstart.md", "source/libsvm-conversion.md", "source/load-and-convert-model.md", "source/mlmodel.md", "source/mlmodel-utilities.md", "source/model-exporting.md", "source/model-input-and-output-types.md", "source/model-intermediate-language.md", "source/model-prediction.md", "source/model-scripting.md", "source/model-tracing.md", "source/multifunction-models.md", "source/new-conversion-options.md", "source/new-features.md", "source/opt-conversion.md", "source/opt-joint-compression.md", "source/opt-opt1_3.md", "source/opt-overview.md", "source/opt-overview-examples.rst", "source/opt-palettization.rst", "source/opt-palettization-algos.md", "source/opt-palettization-api.md", "source/opt-palettization-overview.md", "source/opt-palettization-perf.md", "source/opt-pruning.rst", "source/opt-pruning-algos.md", "source/opt-pruning-api.md", "source/opt-pruning-overview.md", "source/opt-pruning-perf.md", "source/opt-quantization.rst", "source/opt-quantization-algos.md", "source/opt-quantization-api.md", "source/opt-quantization-overview.md", "source/opt-quantization-perf.md", "source/opt-resnet.md", "source/opt-stable-diffusion.md", "source/opt-whats-new.md", "source/opt-workflow.md", "source/overview-coremltools.md", "source/quantization-neural-network.md", "source/sci-kit-learn-conversion.md", "source/stateful-models.md", "source/target-conversion-formats.md", "source/tensorflow-1-workflow.md", "source/tensorflow-2.md", "source/typed-execution.md", "source/typed-execution-example.md", "source/unified-conversion-api.md", "source/updatable-model-examples.rst", "source/updatable-nearest-neighbor-classifier.md", "source/updatable-neural-network-classifier-on-mnist-dataset.md", "source/updatable-tiny-drawing-classifier-pipeline-model.md", "source/xcode-model-preview-types.md", "source/xgboost-conversion.md"], "indexentries": {"classifier": [[1, "index-0", false], [7, "index-0", false], [75, "index-0", false], [76, "index-0", false], [77, "index-0", false]], "combine tracing and scripting": [[10, "index-0", false]], "compared to ml program": [[2, "index-0", false]], "compared to neural network": [[2, "index-0", false]], "compiling": [[2, "index-1", false]], "compiling an ml program": [[2, "index-1", false]], "composite operators": [[3, "index-0", false]], "compress weights": [[65, "index-0", false]], "compute precision": [[38, "index-2", false]], "compute units": [[28, "index-3", false]], "conversion options": [[38, "index-0", false]], "convert a keras model": [[70, "index-4", false]], "convert bert transformer models": [[15, "index-0", false]], "convert concrete function": [[70, "index-5", false]], 
"convert deepspeech model": [[6, "index-0", false]], "convert from": [[28, "index-1", false], [28, "index-2", false], [69, "index-0", false], [70, "index-0", false]], "convert image classifier": [[7, "index-0", false], [26, "index-0", false]], "convert natural language processing model": [[10, "index-0", false]], "convert pre-trained model": [[69, "index-1", false], [70, "index-1", false]], "convert segmentation model": [[5, "index-0", false]], "convert sequential model": [[70, "index-3", false]], "convert tensorflow 1": [[7, "index-0", false]], "convert to": [[16, "index-0", false], [17, "index-0", false], [28, "index-0", false], [68, "index-1", false]], "convert torchvision model": [[8, "index-0", false]], "convert user-defined model": [[70, "index-2", false]], "convert workflow": [[13, "index-0", false], [69, "index-0", false], [70, "index-0", false]], "convert_to parameter": [[38, "index-1", false]], "core ml neuralnetwork": [[2, "index-2", false]], "custom operators": [[19, "index-0", false]], "dtype": [[32, "index-1", false]], "enumerated shapes": [[21, "index-1", false]], "exportedprogram": [[31, "index-0", false]], "flexible input shapes": [[21, "index-0", false]], "gpu runtime": [[2, "index-1", false]], "graph passes": [[22, "index-0", false]], "image": [[7, "index-0", false], [34, "index-0", false]], "images": [[24, "index-0", false], [24, "index-1", false], [24, "index-2", false]], "imagetype": [[24, "index-0", false], [32, "index-0", false]], "input and output": [[24, "index-0", false], [34, "index-0", false]], "input shapes": [[21, "index-0", false]], "input type options": [[32, "index-0", false], [38, "index-3", false]], "jit script": [[35, "index-0", false]], "libsvm": [[27, "index-0", false]], "make prediction": [[8, "index-2", false]], "metadata": [[5, "index-1", false], [26, "index-1", false]], "metal performance shaders graph framework": [[2, "index-1", false]], "mil": [[2, "index-3", false], [33, "index-0", false]], "mil operators": [[3, "index-1", false]], "minimum_deployment_target": [[38, "index-1", false]], "ml program": [[2, "index-0", false], [2, "index-1", false], [16, "index-0", false], [16, "index-1", false], [16, "index-2", false], [26, "index-0", false], [26, "index-1", false], [26, "index-2", false], [26, "index-3", false], [28, "index-3", false], [38, "index-1", false], [38, "index-2", false], [68, "index-1", false]], "mlmodel": [[29, "index-0", false], [30, "index-0", false]], "mlmultiarray": [[24, "index-0", false], [32, "index-0", false]], "model exporting": [[31, "index-0", false]], "model intermediate language": [[2, "index-3", false], [33, "index-0", false]], "model package": [[16, "index-2", false], [26, "index-2", false]], "model preview types": [[78, "index-0", false]], "model preview types in xcode": [[78, "index-0", false]], "model scripting": [[35, "index-0", false]], "model tracing": [[36, "index-0", false]], "multifunction model": [[37, "index-0", false]], "nearest neighbor classifier": [[75, "index-0", false]], "nearest neighbor updatable": [[75, "index-0", false]], "neural network": [[2, "index-0", false], [17, "index-0", false], [38, "index-1", false], [65, "index-0", false], [68, "index-1", false], [76, "index-0", false]], "neural network updatable": [[76, "index-0", false]], "open model": [[26, "index-3", false]], "output type options": [[32, "index-0", false], [38, "index-3", false]], "overview": [[48, "index-0", false], [53, "index-0", false]], "overview and spec": [[29, "index-0", false]], "palettization": [[48, "index-0", false]], 
"pass_pipeline": [[22, "index-0", false]], "pil": [[1, "index-1", false]], "pipeline classifier": [[77, "index-0", false]], "pipeline updatable": [[77, "index-0", false]], "precision type": [[16, "index-1", false]], "prediction": [[29, "index-0", false], [34, "index-0", false]], "preprocess image input": [[8, "index-1", false]], "preprocessing": [[24, "index-2", false]], "preprocessing for images": [[8, "index-1", false], [24, "index-2", false]], "preview": [[5, "index-2", false]], "preview and produce": [[1, "index-0", false]], "preview in xcode": [[5, "index-2", false]], "protobuf spec": [[8, "index-3", false]], "pruning": [[53, "index-0", false]], "pytorch": [[5, "index-0", false], [5, "index-1", false], [8, "index-0", false], [8, "index-1", false], [8, "index-2", false], [10, "index-0", false], [13, "index-0", false], [28, "index-2", false], [31, "index-0", false], [32, "index-2", false], [35, "index-0", false], [36, "index-0", false], [68, "index-0", false]], "pytorch model": [[5, "index-1", false]], "quantization": [[65, "index-0", false]], "rangedim": [[21, "index-2", false]], "rename a feature": [[30, "index-0", false]], "save a model package": [[16, "index-2", false]], "save and load": [[26, "index-2", false]], "save as model package": [[16, "index-2", false]], "scalar type for imagetype": [[24, "index-1", false]], "scikit-learn": [[66, "index-0", false]], "set model metadata": [[5, "index-1", false], [26, "index-1", false]], "set names": [[32, "index-2", false]], "set the scalar type": [[24, "index-1", false]], "shapes": [[21, "index-0", false]], "source formats": [[68, "index-0", false]], "specify compute units": [[34, "index-0", false]], "stateful model": [[67, "index-0", false]], "target format": [[68, "index-1", false]], "tensorflow": [[26, "index-1", false], [28, "index-1", false], [68, "index-0", false]], "tensorflow 1": [[6, "index-0", false], [7, "index-0", false], [69, "index-0", false], [69, "index-1", false]], "tensorflow 2": [[15, "index-0", false], [70, "index-0", false], [70, "index-1", false], [70, "index-2", false], [70, "index-3", false], [70, "index-4", false], [70, "index-5", false]], "tensorflow model": [[26, "index-1", false]], "torchscript": [[13, "index-0", false], [35, "index-0", false], [36, "index-0", false]], "torchvision model": [[8, "index-0", false]], "transformer": [[67, "index-0", false]], "typed execution": [[2, "index-1", false]], "updatable nearest neighbor classifier": [[75, "index-0", false]], "updatable neural network classifier": [[76, "index-0", false]], "updatable pipeline classifier": [[77, "index-0", false]], "use with xcode": [[26, "index-3", false]], "utilities": [[30, "index-0", false]], "vision classifier observation api": [[1, "index-2", false]], "weight metadata": [[30, "index-1", false]], "with mlmodel": [[29, "index-0", false]], "workflow": [[28, "index-0", false]], "xcode": [[5, "index-2", false], [26, "index-3", false], [78, "index-0", false]], "xgboost": [[79, "index-0", false]]}, "objects": {}, "objnames": {}, "objtypes": {}, "terms": {"": [0, 2, 7, 8, 10, 11, 13, 19, 21, 23, 26, 28, 29, 30, 31, 34, 35, 36, 37, 40, 42, 43, 46, 47, 48, 51, 52, 53, 54, 56, 57, 60, 61, 63, 64, 65, 67, 69, 71, 72, 75, 76, 78], "0": [3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 24, 25, 26, 27, 28, 30, 31, 32, 33, 34, 35, 37, 39, 41, 42, 46, 47, 49, 51, 52, 53, 54, 56, 57, 59, 60, 61, 64, 67, 68, 69, 70, 72, 75, 76, 77, 78], "00": 26, "01": [41, 46, 51, 76], "01000001b": 53, "01963953e": 7, "02": [26, 42], "03": [46, 51, 52, 59, 61], 
"04": [7, 46, 51, 56, 59, 61], "044715": 70, "05": [7, 46, 56, 59, 60], "0507009873554805": 3, "05m": 60, "06": [26, 51, 54], "07": [42, 51, 56, 59], "07m": 60, "08": [42, 46, 51], "09": [56, 59, 60], "0_224": [28, 69], "0_224_frozen": [28, 69], "0_unet": 61, "0b1": [62, 63, 72], "0b3": [16, 72], "0e3": 67, "1": [1, 2, 3, 5, 8, 10, 11, 13, 14, 15, 20, 21, 23, 24, 25, 26, 27, 29, 30, 31, 33, 34, 35, 36, 37, 39, 41, 42, 43, 46, 47, 48, 49, 51, 53, 54, 56, 57, 59, 61, 62, 64, 67, 70, 75, 76, 77, 78], "10": [3, 13, 15, 16, 21, 24, 25, 26, 30, 33, 39, 42, 47, 56, 59, 68, 69, 70, 72, 73, 76], "100": [21, 23, 24, 26, 33, 52, 57, 60, 67, 72], "1000": [8, 26, 41, 57, 60, 75], "10000": 10, "1001": [26, 69, 75], "1024": [42, 48, 52, 67], "1097": 26, "11": [16, 24, 25, 26, 37, 39, 42, 46, 68], "12": [5, 6, 13, 15, 16, 25, 26, 39, 42, 54, 60, 68, 78], "121": 67, "1240": 34, "125": 52, "127": [24, 26, 58, 70], "128": [5, 11, 20, 21, 31, 42, 46, 51, 56, 57, 58, 60, 61, 63, 70, 75, 76, 77], "12_h": 15, "12m": 60, "13": [13, 24, 25, 38, 39, 49, 54, 60, 64, 68, 73], "14": [34, 42, 46, 49, 51, 54, 56, 59, 64], "14m": 60, "15": [5, 7, 8, 13, 16, 24, 25, 34, 42, 49, 54, 57, 58, 59, 60, 61, 68, 72], "15576135e": 7, "15x": 60, "16": [2, 6, 20, 30, 31, 32, 36, 38, 39, 40, 42, 43, 48, 49, 54, 57, 60, 61, 63, 71, 72, 78], "17": [15, 21, 34, 42, 51, 54, 59, 75], "18": [42, 59, 60, 64], "18763": 42, "19": 6, "192": 70, "198": 10, "1b": 43, "1e": [33, 40, 69, 70, 72], "1f": 34, "1gb": 61, "1m": 60, "1x": 42, "2": [2, 3, 5, 6, 7, 8, 11, 13, 14, 20, 21, 23, 24, 25, 26, 30, 31, 33, 34, 35, 36, 37, 39, 40, 41, 42, 43, 46, 47, 48, 49, 53, 54, 61, 62, 64, 67, 69, 72, 75, 76, 78], "20": [34, 57, 59, 69, 75], "200": [57, 60], "2023": [34, 43], "2024": [37, 43, 67], "2048": [6, 30, 42, 47], "205": 26, "20db": 72, "21": [5, 26, 42, 61], "2109375": 8, "21910377e": 7, "22": [42, 60, 61, 64], "224": [1, 5, 7, 8, 13, 20, 21, 24, 26, 28, 34, 36, 57, 60, 69], "225": [5, 8, 20, 24], "226": [8, 20, 24], "229": [5, 8, 20, 24], "23": [42, 46, 60], "231": 61, "238": 67, "23m": 60, "24": [26, 42], "24x": 42, "25": [5, 21, 46, 60, 76], "25000": 60, "255": [5, 7, 8, 20, 24, 77], "256": [24, 28, 40, 41, 67], "25600": 30, "257798194885254": 8, "25m": 60, "26": [6, 42, 60], "27": [46, 59], "270m": 11, "28": [42, 46, 54, 70, 76, 77], "28x28": 76, "29": [46, 60], "299": [28, 70], "2gb": 61, "2x": [42, 63, 65], "3": [1, 5, 7, 8, 11, 13, 15, 20, 21, 24, 25, 26, 28, 30, 31, 33, 34, 35, 36, 37, 40, 41, 42, 43, 46, 48, 49, 51, 53, 54, 57, 60, 62, 64, 67, 69, 70, 72, 75, 76, 77, 78], "30": [42, 75], "300": 60, "301507": 7, "305": 26, "3072": 40, "31": 42, "31087": 42, "312": 75, "313": 75, "314": 75, "315": 75, "316": 75, "32": [2, 11, 16, 20, 24, 31, 32, 37, 42, 43, 61, 63, 65, 70, 71, 76], "320": 75, "32000": 67, "321": 75, "322": 75, "323": 75, "324": 75, "32l": 76, "33": 42, "33356332e": 7, "34": 42, "34m": 60, "35": 42, "36": [59, 67], "36m": 37, "37": [49, 51, 54, 59], "38": [42, 46, 49, 59], "38136549121856": 72, "384": 15, "38m": 60, "39": 42, "3b": 42, "3d": 61, "3x": 42, "4": [2, 7, 11, 12, 14, 15, 16, 21, 25, 31, 33, 40, 41, 42, 43, 46, 47, 48, 49, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 70, 72, 75, 76], "40": [51, 61], "400": 57, "406": [5, 8, 20, 24], "4096": 65, "41": [42, 49], "4245": 67, "428": 26, "43": [42, 49, 59], "44": [46, 59], "45": [21, 33, 42, 49, 59], "456": [5, 8, 20, 24, 42], "46": [11, 46, 54], "47": [42, 46, 49, 51], "48": [42, 49, 54, 59, 60], "485": [5, 8, 20, 24], "49": [42, 59, 67], "4921875": 8, "49mb": 
60, "4bit": 61, "4x": 42, "4x6000x32": 37, "5": [10, 11, 16, 24, 25, 30, 39, 41, 42, 46, 49, 52, 54, 60, 67, 70, 72, 75, 76, 77], "50": [21, 26, 30, 51, 52, 54, 59, 60, 61], "51": [42, 60], "512": [47, 57, 61, 72], "52": [49, 51, 54, 59], "521": 42, "522": 42, "53": 42, "53633975982666": 8, "54": [46, 49], "55": [42, 51, 60], "56": 53, "57": 42, "58": [42, 46, 51], "59": [42, 46], "6": [5, 13, 21, 24, 25, 26, 30, 32, 36, 39, 42, 43, 46, 47, 48, 49, 52, 60, 61, 62, 63, 65, 67, 70, 76], "60": [34, 42, 46, 51, 60], "6000": 37, "6000x6000": 37, "61": 42, "62": 42, "62500": 60, "63": [42, 46, 49], "636": 6, "63m": 60, "64": [10, 11, 31, 35, 42, 47, 48, 51, 61, 63, 76], "642789840698242": 8, "65": [46, 49], "66": [42, 46, 51, 56, 59, 60], "668": 42, "67": [21, 26, 46, 49, 51], "6703": 42, "6704": 42, "6732632423543772": 3, "676": 42, "677": 42, "68": [42, 46], "68707207e": 7, "69": [51, 60], "6bit": 61, "6x": 42, "7": [6, 7, 16, 17, 25, 26, 27, 28, 32, 34, 39, 40, 46, 49, 53, 54, 60, 65, 67, 68, 72, 76], "70": [13, 31, 46, 63], "70db": 72, "71": [26, 42, 46, 49, 51, 56, 59, 60, 72], "71m": 60, "72": [30, 42, 60], "72m": 37, "73": [51, 54, 60], "74": [26, 60], "747": 42, "75": [46, 51, 52, 53, 54, 60, 63], "75957654819682": 72, "76": [42, 46, 51, 56, 59, 60], "768_a": 15, "768m": 37, "77": [51, 54, 56, 59], "78": [26, 42, 56, 59], "79": 24, "79885838e": 7, "7b": [11, 67], "8": [5, 8, 11, 13, 15, 16, 24, 25, 27, 31, 40, 41, 42, 43, 46, 47, 48, 49, 52, 54, 56, 57, 58, 60, 61, 63, 68, 76], "80": [41, 42, 46, 56, 59, 61], "8046875": 8, "81": [42, 46, 60, 67], "819": 42, "82": [46, 49], "820": 42, "821": 42, "822": 42, "823": 42, "824": 42, "827": 42, "828": 42, "829": 42, "83": [42, 51], "834": 42, "835": 42, "85": 49, "86": [46, 51, 56, 59], "86m": 60, "89": [46, 56, 59], "8bit": 61, "8gb": 61, "9": [25, 27, 39, 42, 46, 49, 60, 67, 72, 76], "90": 60, "91": 42, "92": [42, 46, 49, 56, 59], "93": [42, 46, 49], "933": 61, "94": [46, 49, 59], "942": 61, "95": 72, "95x": 42, "97": [46, 49], "98": [42, 46, 56, 59], "98c77c72c722": 75, "99": [46, 49, 56, 59], "993": 42, "A": [1, 2, 5, 10, 19, 23, 25, 29, 30, 31, 33, 34, 36, 37, 42, 47, 52, 53, 57, 63, 70, 71, 76], "And": [57, 75], "As": [2, 3, 5, 8, 10, 11, 13, 19, 20, 23, 24, 31, 34, 40, 41, 42, 43, 52, 71, 72], "At": [2, 11, 29, 31, 35, 37, 42], "Be": [3, 20], "But": 11, "By": [2, 8, 24, 28, 34, 40, 65, 71, 75], "For": [0, 1, 2, 3, 5, 7, 8, 10, 11, 16, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 76, 78, 79], "If": [2, 5, 7, 8, 10, 13, 16, 18, 20, 21, 23, 24, 25, 26, 30, 31, 33, 34, 35, 36, 38, 39, 40, 47, 51, 52, 56, 63, 70, 71, 72, 75], "In": [1, 2, 3, 6, 7, 8, 10, 11, 13, 16, 19, 21, 24, 26, 28, 31, 32, 33, 34, 36, 37, 39, 40, 41, 42, 43, 46, 47, 49, 51, 52, 53, 57, 58, 59, 60, 61, 63, 67, 69, 71, 76, 77], "It": [3, 6, 7, 8, 13, 16, 19, 24, 26, 29, 30, 31, 37, 43, 46, 48, 51, 54, 57, 63, 67, 73, 76, 77], "No": 40, "On": [2, 16, 35, 43, 53, 58, 59, 60], "One": [24, 41, 42, 47, 51, 52, 67], "Such": [42, 71], "That": [43, 47], "The": [2, 3, 5, 6, 7, 8, 11, 13, 15, 16, 18, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 44, 46, 47, 48, 49, 51, 52, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 75, 76, 77, 78], "Then": [26, 67, 70], "There": [2, 31, 40, 43, 46, 47, 58], "These": [21, 24, 26, 28, 34, 39, 42, 43, 46], "To": [1, 2, 3, 5, 6, 7, 8, 
10, 11, 13, 15, 16, 17, 19, 21, 23, 24, 25, 26, 28, 29, 30, 32, 33, 34, 35, 37, 38, 39, 40, 43, 47, 51, 52, 57, 61, 63, 67, 68, 69, 70, 72, 76, 78], "With": [3, 6, 8, 10, 13, 17, 21, 26, 34, 39, 40, 42, 60, 61, 64, 67, 71, 72], "_": [10, 34, 35], "__future__": 7, "__init__": [5, 10, 19, 21, 31, 35, 36, 37, 65, 67, 70], "_compute_snr": 72, "_coreml_": 40, "_featuretypes_pb2": 76, "_get_input": 3, "_loopbodi": 35, "_minimum_updatable_spec_vers": 77, "_normal": 72, "_op_req": 19, "_save_h5_as_frozen_pb": 20, "_tf_ops_registri": 3, "_torch_ops_registri": 3, "a100": 42, "a16": [49, 54, 59], "a16w16": 41, "a16w8": 41, "a17": [43, 58, 59, 60, 62], "a17pro": 41, "a8w16": [41, 63], "a8w4": 41, "a8w8": [41, 63], "abil": 71, "abl": [34, 42, 54, 60, 61, 63, 65, 76], "about": [0, 1, 5, 7, 8, 10, 11, 13, 20, 23, 24, 25, 26, 29, 30, 37, 38, 39, 40, 42, 43, 46, 47, 51, 52, 54, 57, 60, 61, 64, 68, 70, 73, 77, 78], "abov": [1, 3, 6, 8, 13, 16, 19, 24, 33, 37, 40, 41, 42, 48, 52, 54, 57, 58, 60, 61, 62, 69, 72], "abscent": 31, "abstract": 2, "acc": 67, "acc_in": 67, "acc_out": 67, "acceler": [62, 71], "accept": [6, 21, 24, 47, 76], "access": [25, 30, 43, 62, 63], "accommod": 33, "accompani": 53, "accord": 24, "accordingli": [47, 61, 63, 67], "account": 63, "accumulator_st": 67, "accumulator_valu": 67, "accur": [13, 40, 41, 46, 51, 52, 56, 61, 72], "accuraci": [6, 7, 8, 18, 21, 23, 26, 30, 41, 42, 43, 44, 46, 49, 52, 54, 57, 58, 59, 60, 61, 63, 65, 69, 71, 72, 76], "achiev": [34, 42, 43, 46, 47, 51, 54, 58, 60, 63], "across": [46, 51, 52, 58, 61, 67], "act_quant_model_config": 63, "act_quant_op_config": 63, "action": [5, 71], "activ": [7, 25, 34, 35, 41, 43, 47, 51, 59, 62, 63, 70, 72, 76], "activation_config": 57, "activation_dtyp": 41, "actual": [2, 19, 21, 28, 34, 35, 38, 42, 57, 60, 72], "ad": [2, 3, 7, 8, 13, 20, 24, 26, 31, 37, 49, 59, 62, 68, 77, 78], "adam": 76, "adapt": [51, 52, 63], "adapt_model_with_lora": 37, "adapted_model": 37, "adapted_model_1": 37, "adapted_model_2": 37, "adapter_1": 37, "adapter_2": 37, "add": [1, 3, 5, 8, 15, 18, 19, 20, 22, 23, 26, 34, 52, 67, 69, 71, 72, 76, 77, 78], "add_funct": 37, "add_multiarray_ndshape_enumer": 21, "add_weight": 70, "addit": [1, 2, 6, 7, 8, 16, 23, 24, 26, 28, 37, 40, 47, 51, 54, 57, 61, 63, 67, 71, 76], "address": [7, 23], "adher": [15, 19, 53], "adjust": [57, 61, 76], "adopt": 34, "advanc": 26, "advancedquantizedlayerselector": 65, "advantag": [2, 26, 34, 49, 53], "aeroplan": [5, 78], "affect": [65, 71], "affin": [40, 58], "after": [6, 7, 19, 20, 24, 25, 26, 32, 34, 35, 57, 65, 67, 69, 78], "again": [3, 7, 34, 72, 76], "against": 30, "ahead": 62, "ai": 37, "aim": 61, "algorithm": [8, 43, 45, 47, 49, 50, 52, 54, 55, 57, 60, 61, 62, 63, 65], "all": [1, 2, 3, 6, 8, 10, 18, 19, 20, 23, 24, 25, 28, 31, 33, 34, 35, 36, 38, 39, 40, 42, 43, 46, 47, 49, 51, 52, 54, 57, 59, 60, 61, 62, 63, 64, 65, 68, 69, 71, 72, 77], "allenai": 42, "allow": [21, 30, 31, 40, 42, 43, 46, 48, 51, 52, 56, 57, 65, 71, 74, 75], "allowed_rang": 75, "allowed_set": [75, 76], "almost": 42, "along": [6, 11, 23, 30, 42, 46, 47, 48, 49, 51, 53, 57, 67, 76], "alongsid": [67, 76], "alpha": 3, "alpha_composit": 5, "alpha_imag": 5, "alphabet": 6, "alphabet_config_path": 6, "alreadi": [6, 13, 20, 23, 25, 28, 31, 34, 35, 47, 52], "also": [1, 2, 5, 6, 7, 8, 13, 15, 16, 20, 21, 22, 24, 25, 26, 28, 29, 30, 31, 32, 38, 40, 41, 42, 43, 46, 47, 48, 49, 51, 54, 56, 57, 58, 60, 62, 63, 67, 68, 69, 71, 72, 76], "alter": 65, "altern": [7, 17, 70, 72], "although": 2, "alwai": [7, 20, 40, 63, 65, 69], 
"amax": [7, 72], "amin": 72, "amount": [43, 49, 52, 54, 57, 59, 63], "an": [1, 2, 3, 5, 7, 8, 12, 13, 15, 16, 18, 19, 21, 22, 23, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 40, 42, 43, 44, 46, 47, 48, 49, 52, 57, 58, 60, 61, 62, 63, 64, 67, 68, 69, 70, 71, 75, 76, 78], "analysi": 1, "analyz": [35, 57], "andrei": 26, "andrew": 26, "angl": 61, "ani": [2, 6, 23, 24, 25, 29, 31, 40, 42, 47, 51, 53, 60, 61, 64, 69, 71, 72], "anim": 26, "annot": 21, "anoth": [2, 8, 26, 30, 31, 34, 37, 52, 62, 67, 70, 71], "answer": [15, 42], "ant": 8, "antialia": [24, 34, 72], "anymor": 61, "anyth": 40, "api": [2, 6, 8, 13, 14, 16, 18, 19, 20, 22, 24, 26, 27, 28, 30, 31, 32, 33, 34, 38, 39, 40, 41, 42, 43, 45, 46, 49, 50, 51, 55, 60, 61, 64, 65, 66, 67, 68, 69, 70, 74, 79], "app": [25, 26, 34, 37, 43, 64, 71], "appear": [5, 10, 11, 15, 23, 25, 26, 35, 78], "append": [6, 10, 11, 30], "appl": [5, 11, 15, 20, 23, 26, 34, 56, 61, 64, 78], "appli": [5, 8, 10, 19, 20, 22, 24, 30, 32, 35, 36, 40, 41, 42, 43, 44, 46, 47, 51, 52, 54, 56, 57, 60, 61, 62, 63, 67, 70, 71, 72, 75], "applic": [8, 11, 23, 24, 26, 28, 30, 62, 70, 72], "approach": [7, 37, 40, 41, 43, 46, 56, 57, 63], "appropri": [7, 23, 25, 35, 63, 72], "approv": 23, "approxim": [37, 43, 46, 48, 70], "ar": [1, 2, 3, 4, 6, 7, 8, 10, 11, 13, 16, 18, 19, 20, 21, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 67, 68, 69, 71, 72, 75, 76, 77], "arbitrari": [6, 21], "arc_challeng": 42, "arc_easi": 42, "architectur": [2, 6, 8, 16, 26, 42, 46, 51, 67, 78], "archiv": 2, "area": 29, "arg": 31, "argmax": [5, 7, 10, 11], "argsort": 8, "argument": [8, 16, 17, 24, 30, 33, 34, 37, 40, 41, 47, 65, 68, 69, 72], "arithmet": 56, "around": 56, "arrai": [6, 7, 8, 15, 18, 19, 21, 24, 32, 67, 70, 72], "arrang": 30, "arrayfeaturetyp": [30, 34], "arteri": 10, "articl": [60, 61, 67], "artist": 34, "as_default": [7, 69, 72], "as_tensor": 5, "as_text": 69, "asarrai": 8, "ask": [20, 23], "assert": [3, 8, 26], "assert_allclos": [67, 69, 70], "asset": [6, 30, 34, 37], "assign": [5, 23, 46], "associ": [1, 2, 30, 75, 76, 77], "assum": [1, 8, 24, 34, 35, 40, 47], "assumpt": 34, "astyp": [5, 6, 7, 8, 15, 33, 34, 72], "asymmetr": [20, 56], "async": 34, "atol": [69, 70], "attach": [37, 63], "attempt": 75, "attent": [31, 46, 47], "attention_mask": 67, "attention_output": 67, "attn_mask": 67, "attr": [3, 19], "attribut": [2, 10, 19, 24], "audio_sample_16bit_mono_16khz": 6, "audiofil": 6, "augment": [18, 49], "author": [7, 11, 23, 26, 29, 31, 42, 75, 76, 77], "auto": 67, "automat": [1, 6, 7, 18, 20, 21, 24, 25, 31, 32, 40, 42, 56, 69, 71, 72, 76], "automodelforcausallm": 11, "autotoken": [11, 42], "avail": [2, 3, 5, 6, 19, 24, 25, 26, 28, 34, 37, 38, 39, 40, 41, 42, 46, 47, 49, 56, 57, 60, 63, 65, 67, 68, 69, 71, 78], "averag": [13, 24, 42, 63], "avg": 35, "avoid": [19, 61, 67], "await": 23, "awar": [51, 56, 63], "ax": 33, "ax_arrai": 72, "axi": [7, 8, 11, 19, 40, 47, 72], "b": [3, 69, 70], "back": [2, 11, 20, 61, 67, 75], "backbon": [37, 46, 49], "backend": [2, 19, 20, 34, 43, 49, 61, 68, 71, 76], "backend_mlprogram": 26, "background": [5, 8, 26, 61, 72, 77, 78], "backward": [41, 57, 60], "bake": 26, "bandwidth": 58, "bar": 71, "bare": 42, "base": [8, 10, 15, 21, 24, 25, 26, 28, 29, 31, 34, 36, 37, 41, 43, 44, 46, 47, 51, 54, 59, 60, 61, 62, 71, 72, 75, 77], "base_model": 37, "base_xl": 61, "baselin": [60, 61], "basic": 25, "batch": [3, 11, 13, 24, 34, 41, 49, 54, 57, 59, 60, 70, 76], "batch_dim": 
31, "batch_idx": 47, "batch_siz": [15, 41, 67], "batchnorm": [22, 65], "bath": [29, 34, 66], "bathroom": 29, "beauti": 61, "becaus": [2, 25, 28, 32, 34, 42, 43, 57, 67], "becom": [24, 42, 60], "bedroom": [29, 34, 66], "been": [6, 7, 13, 16, 23, 24, 31, 35, 40, 47, 52, 57, 60, 61, 62, 76], "befor": [8, 13, 19, 23, 24, 25, 26, 30, 35, 41, 42, 43, 47, 48, 57, 67, 69, 70, 72, 77], "begin": [2, 7, 10], "begin_step": 51, "beginn": 2, "behav": 60, "behavior": [16, 19, 23, 28, 68], "behind": 33, "being": [23, 42, 47, 48, 49, 52, 60], "belong": 48, "below": [1, 10, 11, 26, 30, 37, 40, 41, 42, 46, 47, 49, 51, 52, 57, 59, 60, 62, 63, 67], "benchmark": [8, 10, 11, 46], "benefici": [37, 41, 42], "benefit": [24, 40, 42, 43, 49, 54, 57, 59, 60], "bert": [14, 18, 78], "bert_en_uncased_l": 15, "bert_lay": 15, "bert_with_preview_typ": 15, "bertqa": [15, 78], "best": [7, 21, 35, 41, 42, 43, 46, 51, 60, 63, 65, 69, 71, 72], "beta": [13, 16, 31, 62], "better": [2, 7, 13, 34, 41, 42, 43, 46, 51, 57, 60, 61, 63, 65, 67, 70, 72], "between": [2, 6, 10, 20, 26, 30, 31, 37, 38, 42, 43, 51, 56, 63, 65, 67, 68, 71, 72], "bia": [7, 8, 20, 24, 26, 37, 40, 58, 65, 70], "bias": [7, 24], "bias_scal": 40, "bicycl": [5, 78], "big": [60, 61], "billion": 42, "bin": [25, 30], "bind": 19, "binder": 64, "bird": [5, 78], "bisect_model": 30, "bit": [18, 24, 40, 41, 42, 43, 46, 47, 48, 49, 53, 56, 57, 58, 59, 60, 61, 62, 63], "black": 77, "blanch": 24, "blob": [23, 30, 76], "blob_map": 30, "block": [2, 30, 33, 35, 40, 42, 43, 51, 52, 53, 54, 56, 57, 58, 59, 61, 62, 63, 67], "block0": 33, "block_siz": [40, 42, 52, 57], "blockwis": 40, "blog": 67, "blue": [24, 61], "blue_bia": 24, "bnkd": 3, "bnqd": 3, "bnqk": 3, "board": [5, 78], "bodi": [10, 18, 35], "bold": 42, "bool": [19, 33], "boolinputtyp": 19, "both": [2, 6, 7, 24, 28, 32, 35, 37, 40, 42, 56, 58, 60, 63, 76], "bottl": [5, 78], "bottleneck": [24, 49, 58, 59], "bound": [21, 42, 43, 54, 58, 75], "branch": 25, "break": [6, 8, 30, 37, 42], "bridg": 10, "brief": 63, "bright": 61, "bring": 61, "broken": 30, "brows": 25, "browser": 64, "bsd": [23, 29], "bu": [5, 78], "budget": 60, "buffer": [40, 67], "bug": 23, "build": [15, 20, 23, 31, 33, 36, 42, 49, 54, 59, 60, 61, 70], "build_wheel_macos_py38": 25, "builder": [2, 3, 18, 19, 20, 33, 67, 75, 76], "built": [2, 40], "bulk": 10, "bundl": 34, "busiest": 10, "button": [24, 25], "byte": [5, 26, 54], "c": 29, "c4": 42, "cach": [34, 42, 43], "caff": [23, 39], "calcul": [37, 46, 67], "calibr": [43, 46, 47, 51, 58, 60, 61, 62], "calibration_data": 47, "calibration_data_load": 61, "calibration_nsampl": [42, 47, 52, 57, 61], "call": [2, 3, 6, 7, 18, 20, 22, 24, 31, 34, 37, 42, 46, 47, 48, 52, 58, 67, 68, 70, 72, 75], "callabl": 47, "can": [1, 2, 3, 5, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79], "can_allow_multiple_input_shap": 39, "candid": 15, "cannot": [5, 16, 30, 67], "canonic": 22, "canva": 61, "capabl": [6, 43, 69], "captur": [31, 34, 35, 36, 37, 49, 54, 57, 59], "car": [5, 78], "care": 67, "carefulli": 67, "carri": 31, "case": [2, 3, 6, 7, 8, 13, 15, 19, 21, 28, 30, 31, 33, 35, 36, 37, 41, 42, 43, 46, 47, 56, 58, 61, 62, 63, 67, 71, 72, 76], "cast": [38, 58, 72], "castl": 61, "cat": [5, 10, 11, 61, 78], "cat_dog": 5, "categor": 76, "categori": [5, 26, 63], "categorical_crossentropi": 76, 
"categoricalcrossentropylosslay": 76, "causal_mask": 67, "cce": 76, "cell": 6, "center": [2, 48], "centernet": [46, 49], "centric": 2, "centroid": [46, 47, 48], "cepstral": 6, "certain": [30, 40, 51, 52, 53, 65, 78], "chair": [5, 78], "chanc": 65, "chang": [6, 11, 20, 23, 28, 32, 34, 35, 39, 40, 52, 57, 61, 62, 67, 75, 76], "change_input_output_tensor_typ": 30, "channel": [7, 24, 34, 35, 40, 47, 56, 57, 58, 59, 60, 62, 63, 65], "channel_axi": 47, "charact": 6, "characterist": [41, 63], "chart": 71, "check": [7, 18, 20, 23, 39, 43, 47, 63, 69, 70, 72, 75], "checkout": 47, "checkpoint": [6, 49], "checkpoint_dir": 6, "checkpoint_fil": 69, "chen": 26, "cherish": 11, "chieh": 26, "child": 30, "child_op": 30, "chip": [58, 59, 60, 62, 67], "choos": [5, 11, 26, 43, 46, 54, 57, 65, 68, 78], "chosen": [37, 51, 53, 57, 76], "chunk": [6, 30, 42], "ci": 25, "citi": 10, "ckpt": 69, "clarif": 23, "class": [0, 1, 3, 5, 7, 10, 18, 21, 24, 26, 28, 29, 30, 31, 32, 33, 35, 36, 37, 39, 41, 57, 62, 63, 64, 65, 67, 70], "class_id": 8, "class_label": [1, 8, 26, 76], "class_nam": 19, "classic": 73, "classif": [1, 24, 37, 49, 59, 70, 77], "classifi": [4, 8, 14, 24, 26, 28, 37, 66, 69, 73, 74, 78], "classifier_config": [1, 7, 8, 26], "classifierconfig": [1, 7, 8, 26], "classlabel": [1, 7, 8, 26], "clear": [61, 67], "clear_devic": 69, "clear_sess": 76, "click": [5, 7, 8, 15, 21, 24, 25, 26, 49, 51, 54, 59, 72, 77, 78], "clip": [58, 72], "clone": 25, "close": [7, 8, 24, 43], "closer": [13, 46], "cluster": [40, 46, 47, 48, 65], "cluster_dim": [40, 47, 48], "cluster_dtyp": 47, "cmake": 25, "cnn": 60, "coco": 49, "code": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 18, 19, 21, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 41, 42, 47, 52, 57, 60, 61, 63, 67, 71, 72, 78], "coeffici": 6, "collect": [5, 26, 57, 60, 76, 78], "color": [5, 23, 24, 72, 78], "color_layout": 24, "colorimag": 24, "colorlayout": 24, "coloroutput": 24, "colorspac": [76, 77], "colortocolormodel": 24, "column": 25, "com": [5, 7, 8, 15, 23, 26, 77, 78], "combin": [0, 10, 22, 40, 43, 72], "combined_adpater_model": 37, "come": [7, 42], "comfort": 23, "command": [2, 5, 8, 10, 11, 23, 25, 26, 31, 61], "comment": [23, 26], "commerci": 61, "commit": [25, 52], "common": [2, 6, 8, 20, 30, 37, 47, 53, 67, 73], "commonli": [34, 49], "commun": [19, 23, 34], "compact": 40, "compar": [9, 16, 20, 26, 30, 33, 42, 54, 59, 61, 63, 67, 68, 69], "comparison": [2, 16, 33, 42, 67], "compat": [28, 69], "compil": [2, 18, 19, 21, 29, 30, 43, 76], "compiled_model": 30, "compiled_model_path": [30, 34], "compiledmlmodel": [20, 30, 34, 37], "complet": [3, 10, 19, 23, 71], "complex": 2, "complic": [3, 35], "compon": [2, 6, 16, 24, 29], "compos": [5, 18, 77], "composit": [2, 4, 20, 33, 73], "compress": [0, 18, 20, 39, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 62], "compressed_8_bit_model": 57, "compressed_mlmodel": [30, 47, 61, 63], "compressed_model": [41, 42, 57, 61], "compressed_model_a8": 57, "compressed_torch_model": 63, "compression_typ": 40, "compressor": [41, 52], "compressor_config": 61, "compris": 2, "comput": [2, 5, 7, 8, 19, 22, 33, 35, 41, 42, 43, 46, 47, 49, 51, 52, 54, 56, 57, 58, 59, 60, 61, 63, 65, 67, 71], "computation": 46, "compute_device_usag": 30, "compute_plan": 30, "compute_precis": [16, 20, 38, 71, 72], "compute_unit": [7, 8, 20, 28, 30, 34, 38, 67], "computeunit": [8, 20, 28, 30, 34, 38, 67], "conc_func": 70, "concat": 11, "concaten": 6, "concept": 31, "concis": 2, "conclud": 42, "concret": 68, "conda": [7, 72], "condit": [10, 13, 35, 36], "conduct": [23, 
57], "confer": [34, 64], "config": [30, 34, 41, 42, 46, 47, 49, 51, 52, 54, 56, 57, 59, 60, 61, 63, 72], "config_dict": [41, 60], "configur": [6, 23, 25, 29, 30, 34, 40, 42, 48, 52, 57, 60, 63, 71], "confirm": [8, 75, 76], "connect": [2, 61, 64, 76], "consecut": 54, "consid": [20, 21, 26, 30, 31, 37, 41, 43, 60, 61], "consider": [20, 34, 42, 43, 46, 59], "consist": [2, 6, 8, 29, 31, 36, 72], "const": [2, 19, 30, 33, 69], "const_elimin": 22, "constant": [2, 22, 33, 52, 57], "constantsparsityschedul": [51, 52], "constexpr_blockwise_shift_scal": 40, "constexpr_lut_to_dens": 40, "constrain": [2, 58], "constraint": [21, 53, 57, 71], "construct": [3, 5, 6, 10, 18, 20, 30, 33, 35, 37, 40, 41, 47, 65, 67, 77], "consum": [1, 2, 71], "consumpt": [20, 24, 39, 43, 64], "contain": [6, 10, 16, 24, 28, 30, 32, 33, 37, 52, 53, 62, 76, 78], "content": [2, 6], "context": [3, 10, 15, 19], "contigu": 42, "continu": [2, 3, 10, 20, 25, 33, 68], "contrast": 71, "contribut": [0, 18, 22], "contributor": 23, "control": [2, 10, 13, 22, 24, 30, 34, 35, 36, 47, 57, 58, 71], "controlflownet": 35, "controlflownet2": 35, "conv": [21, 35, 40, 47], "conv1": [36, 47], "conv2d": [21, 35, 36, 52, 76], "conv3": 47, "conv_1": 40, "conv_2": 40, "conveni": [1, 13, 24, 30, 34, 63, 67, 70], "converg": 47, "convers": [1, 2, 6, 7, 8, 9, 12, 16, 19, 22, 24, 25, 26, 29, 30, 31, 33, 37, 39, 43, 47, 52, 64, 67, 69, 70, 71, 72, 73], "convert": [1, 2, 4, 18, 21, 22, 27, 29, 32, 34, 35, 36, 37, 41, 48, 49, 56, 61, 63, 64, 65, 66, 68, 71, 73, 78, 79], "convert_double_to_float_multiarray_typ": 30, "convert_keras_to_mlmodel": 76, "convert_neural_network_spec_weights_to_fp16": 39, "convert_neural_network_weights_to_fp16": 39, "convert_to": [16, 17, 21, 24, 26, 28, 38, 57, 68, 69, 70, 71], "converted_model": 67, "converted_model_kvcach": 67, "convnet": 43, "convolut": [8, 18, 35, 36, 52, 60, 65, 71, 76, 77], "copi": [5, 15, 23, 24, 34, 67], "copyfrom": 77, "copytre": 34, "core": [1, 3, 5, 6, 12, 14, 15, 16, 17, 18, 19, 22, 23, 24, 26, 27, 28, 30, 31, 32, 34, 35, 36, 37, 38, 40, 42, 43, 48, 49, 51, 53, 56, 58, 60, 61, 65, 66, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79], "coreltool": 51, "coreml": [5, 6, 7, 8, 13, 15, 20, 24, 26, 30, 33, 39, 41, 42, 46, 47, 49, 51, 52, 56, 57, 60, 62, 63, 72, 75, 78], "coreml_dict_nam": 8, "coreml_input": [7, 10], "coreml_model": [7, 24, 27, 57, 66, 79], "coreml_model_fil": 7, "coreml_model_path": 76, "coreml_out": [69, 70, 72], "coreml_out_dict": [8, 69, 70], "coreml_out_fp32": 72, "coreml_output": 7, "coreml_output_tensor": 72, "coreml_output_tensor_fp32": 72, "coreml_pred_dict": 7, "coreml_predicted_class_label": 7, "coreml_prob_dict": 8, "coreml_updatable_model_path": 76, "coreml_update_st": 67, "coremlcompil": 2, "coremltool": [0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 41, 42, 43, 46, 47, 49, 51, 52, 54, 56, 57, 59, 60, 61, 62, 63, 64, 65, 66, 67, 69, 70, 71, 72, 73, 75, 76, 77, 78, 79], "coremlweightmetadata": 30, "correct": [7, 20, 26, 30, 34, 35, 42, 47, 56, 63], "correctli": [5, 7, 18, 26, 35, 40, 69, 70, 75], "correl": 42, "correspond": [1, 3, 6, 20, 24, 30, 40, 48, 72], "cost": [30, 34], "could": [24, 34, 37, 40, 41, 42, 43, 53], "count": 65, "counterpart": 57, "countri": 10, "coupl": 41, "cours": [2, 26, 52, 75], "cover": [40, 43], "coverag": [13, 31], "cow": [5, 78], "cp38": 25, "cpu": [5, 7, 8, 20, 28, 34, 38, 42, 43, 54, 59, 64, 71, 72], "cpu_and_gpu": [28, 67], "cpu_and_n": 28, "cpu_onli": [8, 20, 28, 34, 61], 
"cpuonli": [20, 71], "crash": 21, "creat": [5, 6, 7, 8, 11, 13, 15, 16, 18, 19, 20, 23, 24, 25, 26, 28, 29, 31, 34, 37, 47, 48, 53, 57, 72], "create_keras_base_model": 76, "creation": [1, 34, 64], "creativecommon": 78, "cross": 76, "crossattndownblock2d": 61, "crossentropyloss": 42, "crucial": 7, "cr\u00e8che": 78, "csv": [29, 30, 66], "ct": [1, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 37, 40, 41, 42, 47, 52, 57, 60, 61, 62, 63, 65, 66, 67, 69, 70, 71, 72, 78, 79], "ctc": 6, "cto": [30, 47, 57, 61, 62, 63], "cuda": 52, "current": [2, 13, 19, 23, 25, 31, 39, 43, 68, 75, 76], "curv": 63, "custom": [2, 4, 5, 30, 31, 47, 52, 57, 70, 73], "custom_lut": 65, "custom_mil_op": 19, "custom_tf_op": 19, "custom_topk": 19, "customdens": 70, "customtopk": 19, "cute": 15, "cvnet": [49, 59], "cvpixelbuff": 24, "d": [18, 48, 52, 61, 76], "dai": [11, 61], "daisi": [1, 8, 24, 26], "dat": 47, "data": [5, 8, 10, 13, 18, 23, 24, 26, 29, 30, 31, 32, 34, 35, 36, 40, 41, 43, 44, 46, 47, 49, 51, 53, 54, 58, 59, 61, 62, 64, 65, 66, 67, 70, 72, 76], "data_and_predict": 30, "data_load": 63, "data_util": 47, "dataload": [42, 47, 52, 57, 60], "dataset": [8, 10, 26, 42, 46, 49, 51, 52, 56, 59, 62, 76, 77], "datatyp": [76, 77], "de": [42, 57], "dead_code_elimin": 22, "deal": 30, "dearli": 11, "debug": [28, 30, 38], "decent": [61, 63], "decis": 67, "decod": [6, 8, 10, 11, 26, 42, 61], "decompos": 18, "decompress": [43, 49, 59, 62], "decor": [3, 19, 33], "decoupl": [2, 16], "decreas": [42, 43, 56], "dedupl": 37, "deep": [0, 2, 68, 73], "deeplab": [13, 31], "deeplabv3": 5, "deeplabv3_resnet101": 5, "deepspeech": [14, 18, 69], "def": [2, 3, 5, 7, 8, 10, 19, 20, 21, 24, 31, 33, 34, 35, 36, 37, 42, 61, 65, 67, 70, 72, 75, 76], "default": [6, 7, 8, 10, 17, 24, 26, 27, 28, 30, 37, 38, 40, 41, 47, 48, 52, 56, 57, 58, 65, 67, 69, 71, 72, 75, 77, 79], "default_class_label": [75, 77], "default_function_nam": 37, "default_palett": 40, "default_prun": 40, "default_s": 21, "default_token": 10, "defaultinput": 19, "defaultlabel": 75, "defaultvalu": 75, "defin": [1, 2, 3, 5, 7, 13, 15, 18, 20, 21, 26, 28, 29, 31, 33, 35, 36, 37, 42, 47, 57, 67, 69, 71, 78], "definit": [37, 69], "degrad": [42, 46, 63, 65], "del": 3, "delet": 34, "demand": 47, "demo_util": 6, "demonstr": [5, 7, 10, 15, 18, 26, 28, 29, 34, 35, 57, 65, 67, 69, 70, 72, 75, 76, 78], "denot": [10, 62], "dens": [6, 40, 42, 46, 52, 53, 54, 62, 70, 76], "dense1": 40, "dense_1": 76, "dense_1__activation__": 76, "dense_1__activation___output": 76, "dense_1_output": 76, "dense_2": [65, 76], "dense_2__activation__": 76, "dense_2_output": 76, "depend": [8, 10, 13, 25, 34, 35, 36, 42, 43, 46, 49, 51, 54, 59, 60, 63, 65, 71, 72], "deploi": [30, 37, 39, 42, 61, 67, 68, 72], "deploy": [16, 28, 32, 38, 48, 53, 61, 67], "deprec": [7, 20, 72], "depth": 78, "depthestim": 78, "depthwis": 65, "depthwiseconv": 65, "dequant": [58, 65], "deriv": 46, "desc": 37, "descent": [51, 76], "describ": [1, 2, 4, 5, 8, 15, 16, 20, 21, 23, 25, 26, 28, 33, 36, 39, 40, 41, 42, 47, 52, 56, 57, 65, 71], "descript": [2, 6, 8, 15, 18, 19, 20, 21, 22, 23, 26, 34, 61, 63, 68, 71, 72, 75, 76, 77], "deselect": 57, "design": [8, 30], "desir": [31, 41, 51, 60, 63, 72, 75], "detach": [8, 11, 67], "detail": [0, 7, 8, 10, 13, 16, 19, 20, 21, 23, 24, 26, 28, 29, 32, 33, 38, 40, 43, 47, 52, 54, 57, 59, 61, 63, 64, 67, 69, 70], "detect": [22, 26, 40, 49], "detectinghumanbodyposesinanimag": 78, "determin": [11, 16, 21, 23, 24, 35, 44, 47, 71], "dev": [6, 15, 70], "develop": 
[13, 23, 28, 31, 34, 64], "deviat": [5, 8, 24, 46, 51], "devic": [2, 19, 20, 21, 24, 26, 30, 34, 37, 42, 49, 52, 54, 59, 60, 63, 64], "di": 11, "diagram": 34, "dialect": 33, "dict": 78, "dictionari": [1, 5, 19, 30, 47, 52], "dictionarytyp": 8, "didn": 17, "differ": [6, 18, 20, 21, 24, 26, 30, 34, 35, 37, 38, 42, 43, 46, 47, 51, 52, 57, 58, 60, 61, 62, 63, 65, 69, 72], "differenti": [49, 51, 57, 62], "difficult": [7, 72], "diffus": [30, 34, 61], "digest": 67, "digit": 76, "digitprob": 76, "digitprobabilities_tru": 76, "dim": [10, 11, 31, 36, 40, 41], "dimens": [6, 8, 10, 11, 18, 20, 31, 34, 47, 58, 75, 77], "dimension": 77, "diningt": [5, 78], "dir_path": 7, "direct": 2, "directli": [2, 3, 6, 7, 12, 16, 20, 28, 29, 30, 35, 36, 37, 39, 40, 42, 43, 47, 56, 60, 61, 63, 67, 69, 70, 72], "directori": [2, 7, 20, 26, 34, 68, 70], "dirs_exist_ok": 34, "disabl": [3, 8, 13, 71], "disappear": 42, "discret": [40, 41, 43], "discuss": [23, 41], "disk": [2, 24, 30, 34, 41, 43, 47, 60, 65, 69], "displai": [1, 8, 21, 24, 26, 29], "display_segment": 5, "dispos": 3, "dist": 25, "distanc": 46, "distilbert_model": 15, "distilberttoken": 15, "distinguish": 2, "distribut": [1, 6, 25, 71], "divid": [8, 40, 53, 63], "dkm": [41, 46, 47, 60], "dkmpalett": [41, 46, 47, 60, 62], "dkmpalettizerconfig": [47, 60], "do": [2, 8, 10, 11, 20, 21, 22, 26, 28, 30, 33, 34, 37, 39, 40, 43, 47, 57, 60, 63, 67, 69, 71, 72], "do_quant": 65, "doc": [23, 54, 59], "doc_str": 19, "document": [3, 5, 21, 26, 64], "doe": [6, 25, 26, 28, 31, 32, 34, 37, 40, 42, 56, 65, 71, 72], "doesn": [21, 23, 42], "dog": [5, 15, 78], "domain": 78, "domin": 26, "don": [3, 6, 23, 28, 30, 33, 34, 35, 36, 67, 78], "done": [41, 47, 51, 62, 63, 67], "doubl": [5, 15, 18, 24, 26, 76, 78], "down": [19, 25, 42, 43, 54, 59, 61, 65], "down_block": 61, "down_block_res_sampl": 61, "download": [2, 5, 6, 25, 49, 51, 54, 59, 69, 70, 78], "download_file_and_unzip": 7, "drag": [5, 24, 26, 78], "dram": 43, "dramat": [42, 61], "draw": [18, 77], "drop": [49, 60, 63], "dropdown": 23, "dropout": [8, 13, 76], "dtype": [10, 11, 15, 19, 24, 31, 33, 34, 41, 42, 47, 57, 63, 67, 70], "due": [47, 52, 58, 61], "dummi": [11, 41], "dump": [5, 78], "duplic": 23, "dure": [1, 3, 7, 16, 18, 20, 21, 22, 24, 26, 34, 35, 37, 39, 40, 46, 49, 51, 57, 58, 59, 60, 64, 67, 68, 69, 71, 72], "dynam": [20, 31, 42, 57, 71], "dynamic_shap": [11, 31], "e": [11, 13, 31, 41, 43, 47, 54, 59, 62, 63], "e8bea591e72c": 75, "each": [2, 5, 6, 8, 18, 20, 22, 23, 24, 30, 33, 34, 37, 40, 41, 42, 46, 47, 48, 49, 51, 52, 53, 56, 58, 59, 60, 67, 71, 72, 75, 76, 77], "earlier": 13, "eas": 59, "easi": 19, "easier": [5, 60], "easiest": [36, 46], "easili": [2, 6, 20, 30, 33, 46, 63], "edg": 2, "edit": [16, 30], "educ": 64, "effect": [47, 57, 63], "efficaci": 51, "effici": [2, 12, 13, 18, 19, 22, 24, 32, 37, 42, 53, 56, 63, 67], "eight": 10, "einstein": 3, "einsum": 3, "either": [2, 8, 13, 16, 25, 26, 28, 32, 33, 38, 42, 47, 51, 52, 53, 62, 63, 68, 71], "element": [21, 30, 42, 46, 47, 48, 51, 52, 54, 57, 65], "elicit": 10, "elig": 72, "elimin": [22, 38], "els": [7, 31, 35, 63, 72, 75], "elu": 3, "emb": [40, 77], "embed": [2, 8, 11, 18, 31, 42, 49, 51, 54, 59, 67], "embed_s": 67, "embedding_model": 77, "embedding_path": 77, "embedding_spec": 77, "emploi": [6, 28, 59], "empti": [18, 30, 75, 77], "enabl": [2, 20, 24, 30, 32, 34, 35, 43, 47, 48, 57, 60, 65, 69], "enable_per_channel_scal": [42, 47], "encapsul": 29, "encod": [1, 15, 35, 61, 67], "encoder_hidden_st": 61, "encount": [3, 19, 35, 54], "encourag": 23, "end": [6, 7, 
10, 34], "end_step": 67, "end_step_dim": 67, "endpoint": 19, "enforc": [53, 71], "eng": 75, "engin": [21, 28, 30, 34, 38, 41, 42, 43, 49, 54, 57, 58, 59, 60, 63, 64, 71, 72], "english": 7, "enhanc": [23, 68], "enough": [60, 71], "ensembl": 64, "ensur": [3, 7, 8, 13, 21, 23, 26, 40, 41, 57, 61, 70], "enter": [15, 23], "entir": [2, 6, 35, 48, 71], "entri": [26, 34, 40, 41, 48], "entropi": 76, "enumer": [24, 26, 28, 31, 47, 57, 60], "enumerated_shap": 21, "enumeratedshap": [20, 21, 31, 32], "env": [7, 25, 72], "environ": [7, 11, 31, 72], "eo": 10, "epoch": [47, 52, 60, 76], "epsilon": 33, "equal": [60, 61, 71], "equat": [2, 3, 24, 58], "equival": [24, 59], "error": [3, 5, 23, 33, 42, 48, 56, 57, 63, 72, 75, 76], "especi": [2, 28, 38, 42, 59], "essenti": 3, "establish": [2, 10, 11], "estim": [30, 42, 56, 57, 63, 77, 78], "estimated_cost": 30, "etc": [13, 26, 29, 40, 41, 42, 44, 47, 51, 61, 67], "eval": [5, 8, 10, 11, 13, 21, 28, 31, 34, 37, 41, 47, 57, 60, 67], "evalu": [6, 7, 18, 19, 24, 26, 34, 42, 46, 51], "evaluate_classifi": 30, "evaluate_regressor": 30, "evaluate_transform": 30, "even": [23, 31, 34, 36, 41, 42, 43, 59, 60, 63, 71, 72], "ever": 71, "everi": [11, 47, 52, 54], "evolut": 16, "evolv": [2, 3], "exact": [40, 57], "exactli": [30, 33, 35, 51, 72], "examin": [23, 24, 30], "exampl": [0, 5, 6, 7, 8, 9, 10, 11, 13, 15, 16, 17, 19, 20, 21, 22, 23, 25, 31, 32, 33, 35, 36, 40, 42, 43, 48, 52, 53, 63, 65, 70, 71, 73, 74, 75, 76, 77], "example_imag": [1, 24, 26], "example_input": [8, 13, 20, 21, 24, 28, 31, 34, 41, 47, 57, 60, 63], "example_input_id": 11, "except": [2, 6, 21, 30, 33, 47], "exclud": 24, "execut": [2, 7, 8, 9, 16, 20, 22, 24, 26, 28, 30, 31, 34, 43, 61, 65], "exercis": 60, "exist": [2, 7, 20, 21, 23, 37, 76], "expand": 71, "expand_dim": [7, 8, 72], "expect": [1, 3, 6, 8, 24, 26, 41, 59, 70, 72, 76, 77], "expected_output": 30, "expens": [34, 46], "experi": [35, 36, 43, 46, 51, 52, 60, 61, 63, 65, 71], "experiment": [10, 35, 51, 56, 57, 62, 63], "explain": [10, 35, 41], "explan": 57, "explicit": [2, 12, 67, 71], "explicitli": [40, 67, 71], "explor": [6, 42, 51, 56, 60, 61], "export": [5, 6, 12, 13, 18, 20, 28, 39, 41, 42], "export_dir": 6, "exported_model": 31, "exported_program": [5, 8, 11, 13], "exportedprogram": [8, 31, 68], "express": [2, 3, 21, 31, 40, 58], "extend": [11, 51, 65, 77], "extens": [5, 8, 16, 28, 34, 57, 60], "extern": 30, "extra": [19, 38, 42, 57, 61, 67], "extract": [5, 6], "extractal": 7, "extractor": [37, 77], "f": [7, 31, 35, 36, 47, 67, 69, 72], "face": 67, "facebook": 42, "fact": 3, "factor": [34, 42, 44, 46, 51, 54, 58, 63], "fair": 67, "faithfulli": 72, "fake": 57, "fakequant": 57, "fals": [3, 11, 15, 19, 33, 42, 67, 69, 76], "famili": [11, 64], "familiar": 25, "faq": 0, "far": 6, "farmer": 11, "fast": [21, 28, 38, 42, 72], "fast_neural_style_wav": 72, "faster": [7, 34, 41, 43, 59, 62, 63, 67], "fastest": [56, 63], "fastpredict": 34, "father": 11, "fc": [52, 67], "featur": [0, 1, 2, 6, 12, 18, 20, 21, 23, 24, 26, 29, 32, 34, 37, 40, 47, 67, 72, 77, 78], "feature_nam": 21, "featuretypes_pb2": [30, 76], "fed": [6, 10, 52, 67], "feed": [2, 30, 47, 61, 69, 76], "feed_dict": [7, 69, 72], "feet": 29, "few": [2, 6, 7, 33, 34, 40, 41, 42, 43, 44, 46, 51, 53, 56, 57, 63, 71, 76], "fewer": [42, 58], "field": [8, 15, 40], "field_nam": 40, "fig": 72, "figur": [2, 21, 24, 25, 37, 48, 67, 78], "file": [2, 5, 7, 8, 15, 16, 20, 23, 25, 26, 27, 28, 29, 30, 34, 47, 49, 51, 52, 54, 57, 59, 61, 64, 69, 70, 72, 76, 78, 79], "filename_tensor_nam": 69, "fill": [15, 23, 
67], "filter": 23, "final": [2, 6, 22, 34, 41, 46, 47, 51, 52, 57, 60, 61, 63, 67, 69, 70, 76], "final_lay": 57, "finalized_model": 41, "find": [2, 7, 13, 23, 26, 28, 33, 38, 43, 47, 51, 52, 63, 65], "finder": [5, 15, 26, 78], "fine": [18, 30, 37, 42, 43, 49, 51, 52, 54, 58, 62, 64, 71, 76], "finer": 65, "finish": [5, 26], "finishmysent": 10, "finit": [20, 21], "first": [3, 5, 7, 8, 16, 21, 23, 24, 26, 28, 29, 31, 33, 34, 37, 39, 40, 41, 42, 57, 63, 64, 67, 69, 70, 72, 77], "first_lay": 57, "fisher": [42, 46], "fit": [29, 66], "five": [33, 35, 52], "fix": [6, 7, 8, 10, 21, 23, 34, 61, 69], "flag": [6, 7, 19, 47, 71], "flatten": [7, 70, 72, 76], "flatten_1_output": 76, "flexibl": [2, 4, 5, 6, 16, 69, 71, 73], "flexible_shape_util": 21, "float": [2, 18, 20, 32, 38, 39, 40, 41, 43, 48, 53, 58, 67, 71, 72, 77, 78], "float16": [16, 24, 30, 32, 41, 43, 46, 49, 51, 54, 56, 59, 60, 61, 62, 63, 67, 72], "float16computeprecis": 71, "float32": [6, 7, 8, 11, 16, 20, 24, 30, 32, 33, 34, 42, 49, 57, 67, 69, 70, 71, 72, 77], "float_v2_1": 69, "floatinputtyp": 19, "flow": [2, 8, 10, 13, 35, 36, 47, 57, 61, 63], "flush": 6, "fly": [43, 62], "fname": 7, "focu": 6, "focus": 67, "folder": [7, 8, 25, 34, 78], "follow": [1, 2, 3, 5, 6, 7, 8, 10, 11, 13, 15, 16, 17, 18, 19, 20, 21, 23, 24, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 47, 48, 52, 53, 56, 57, 58, 60, 61, 62, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 76, 77, 78], "fontsiz": 72, "food": [26, 61], "footprint": [34, 43, 63, 64], "forev": 11, "forget": 40, "fork": [23, 25], "form": [1, 2, 5, 37, 43, 46, 47], "format": [0, 2, 7, 8, 9, 10, 12, 16, 18, 19, 20, 21, 23, 26, 27, 28, 34, 38, 39, 40, 41, 47, 48, 53, 54, 57, 62, 63, 64, 66, 69, 71, 75, 78, 79], "former": 56, "formula": 24, "forward": [5, 10, 21, 24, 31, 35, 36, 37, 41, 42, 47, 51, 57, 60, 61, 67], "found": [40, 57], "foundat": [2, 16], "four": [35, 63], "fourth": 65, "fp16": [19, 30, 40, 41, 42, 67], "fp16computeprecis": 72, "fp32": [19, 30, 32, 33], "fpath": 7, "fraction": [37, 53], "fragment": 35, "framework": [1, 2, 3, 23, 24, 25, 26, 30, 33, 34, 67, 68, 71, 73], "free": [10, 41, 43, 46, 51, 61, 62], "freez": [28, 69], "freeze_graph": 69, "frequenc": 6, "frequent": 20, "friendli": 47, "from": [0, 1, 2, 3, 5, 6, 7, 10, 11, 13, 15, 16, 18, 19, 20, 23, 24, 26, 29, 30, 32, 33, 35, 36, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 57, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 72, 73, 75, 76, 77, 78], "from_asset": 30, "from_dict": [41, 42, 47, 52, 57, 60, 63], "from_memori": 30, "from_numpi": [8, 40], "from_pretrain": [3, 10, 11, 15, 42], "from_typ": 30, "from_yaml": [47, 52, 57], "fromarrai": [5, 34], "frontend": [2, 3, 19, 26], "frontend_tensorflow2": 26, "frozen": [7, 18, 20, 28, 68], "frozen_graph": 28, "frozen_graph_fil": 69, "fruit": 61, "full": [18, 29, 30, 34, 39, 43, 47, 51, 57, 63, 65, 78], "fulli": [2, 20, 28, 36, 39, 40, 43, 52, 59, 61, 76], "function": [2, 5, 6, 19, 20, 23, 24, 30, 31, 33, 35, 36, 37, 41, 42, 43, 46, 47, 63, 67, 68, 76], "function_nam": [30, 37], "further": [2, 26, 42, 43, 48, 52, 53, 56, 58, 59, 61, 71], "fuse": [22, 43], "fuse_elementwise_to_batchnorm": 22, "futur": [2, 16, 62], "futurewarn": 5, "fx": 57, "g": [7, 13, 31, 41, 43, 47, 54, 59, 62, 63, 72], "gain": [42, 43, 59, 60, 63], "galleri": 26, "gb": [30, 42, 61], "gelu": 70, "gelu_tanh_activ": 70, "gener": [2, 8, 10, 13, 24, 26, 28, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 43, 46, 51, 52, 54, 56, 61, 64, 65, 67, 68, 69, 70, 72], "generated_tensor": 10, "generated_text": 10, 
"generated_text_torch": 10, "german": 61, "get": [0, 1, 2, 5, 6, 7, 13, 16, 18, 19, 20, 24, 25, 28, 29, 33, 34, 37, 41, 42, 43, 47, 51, 57, 60, 61, 62, 63, 67, 69, 71, 76, 78], "get_allowed_shape_rang": 39, "get_c4": 42, "get_calibration_data": 47, "get_compiled_model_path": 34, "get_compute_device_usage_for_mlprogram_oper": 30, "get_concrete_funct": 70, "get_custom_layer_nam": 39, "get_dataload": 47, "get_estimated_cost_for_mlprogram_oper": 30, "get_oper": [7, 69, 72], "get_peft_model": 37, "get_shap": [7, 69], "get_spec": [8, 16, 20, 29, 30, 34, 72, 77], "get_torch_model": 47, "get_weights_metadata": [30, 47, 52], "gfile": 69, "github": [13, 20, 23, 25, 26, 31, 77, 78], "give": [42, 44, 49, 59, 60, 63, 71], "given": [5, 10, 19, 26, 40, 43, 46, 51, 63, 67, 76, 78], "glass": 61, "glm": 2, "global": [24, 47], "global_config": [30, 41, 42, 47, 52, 57, 60, 61, 63], "global_variables_initi": 69, "go": [6, 25, 37, 40, 42, 43, 51, 60, 67, 75], "goal": [43, 60, 63], "goe": [54, 63], "golden": 7, "golden_retriever_carlo": 7, "good": [11, 23, 26, 32, 42, 43, 46, 49, 60, 61, 63, 72], "googl": 70, "googleapi": [7, 8, 26], "googlecreativelab": 77, "got": 40, "gpt": 61, "gpt2": 10, "gpt2lmheadmodel": 10, "gpt2token": 10, "gptq": [57, 62], "gpu": [2, 20, 28, 34, 38, 42, 43, 59, 64, 67, 71, 72], "grab": 3, "gradient": [42, 43, 46, 47, 51, 57, 63, 76], "gradual": [13, 31, 52], "grain": [30, 52, 58], "granular": [2, 40, 41, 42, 46, 47, 52, 56, 57, 60, 61, 63], "graph": [2, 4, 6, 18, 20, 26, 28, 31, 32, 33, 35, 36, 40, 57, 68, 71, 72, 73], "graph_def": 69, "graph_def_fil": 69, "graph_pass": 22, "graphdef": [7, 69, 72], "graphic": [2, 28, 34, 38], "graphmodul": 57, "grayscal": [20, 38, 76, 77], "grayscale_float16": 24, "great": [59, 64], "greater": [19, 30, 52], "greedili": 11, "green": 24, "green_bia": 24, "group": [40, 42, 47, 48, 49, 53, 60, 62, 63], "group_axi": 40, "group_siz": [40, 42, 47, 48, 60, 61, 63], "grow": 23, "guarante": [21, 71], "guid": [0, 20, 22, 25, 43, 51, 52, 57, 64, 70], "guidelin": 23, "gz": 7, "h": [24, 61], "h5": [28, 68, 70, 76], "h5py": 26, "ha": [2, 6, 7, 13, 16, 19, 20, 21, 23, 24, 28, 30, 31, 33, 34, 35, 36, 37, 40, 41, 46, 47, 52, 57, 60, 67, 71, 76, 77], "half": [43, 61, 65], "hand": [35, 49, 53, 59, 60, 76], "handl": [2, 3, 6, 7, 18, 40, 65, 69], "handwriten": 76, "handwritten": 76, "happen": [10, 35, 43, 49], "hard": [11, 40, 72], "harder": 21, "hardwar": [19, 20, 43, 44, 53, 54, 56, 57, 58, 59, 71, 72], "has_custom_lay": 39, "hash": 37, "have": [6, 7, 8, 18, 20, 21, 23, 24, 26, 28, 30, 31, 33, 34, 35, 37, 40, 42, 43, 46, 47, 48, 51, 52, 57, 60, 61, 62, 65, 67, 71], "hdf5": [28, 68, 70], "he": 11, "head": [25, 37, 67], "heart": 77, "heavi": 6, "height": [24, 34, 76, 77], "hello": 15, "help": [8, 23, 38, 41, 42, 43, 52, 54, 56, 57, 58, 60, 64, 67], "henc": [13, 42, 67, 70], "here": [11, 13, 19, 30, 31, 33, 37, 40, 41, 42, 43, 44, 47, 52, 54, 57, 59, 60, 61, 67], "hesit": 23, "hessian": [46, 51], "hexcod": 78, "hf": 11, "hi": 11, "hidden": 6, "hidden_st": 61, "high": [29, 43, 48, 63, 72], "higher": [7, 16, 20, 42, 43, 46, 47, 51, 54, 60, 63, 71, 72], "highest": [7, 11], "highli": [41, 43], "highlight": [15, 20, 25, 61], "hint": 34, "hit": [13, 31], "honor": 40, "hood": 6, "hook": 52, "hors": [5, 78], "host": 26, "hour": 60, "hous": [29, 66], "housepric": [29, 34, 66], "how": [1, 2, 5, 7, 8, 10, 11, 13, 18, 19, 20, 21, 23, 24, 25, 26, 28, 29, 30, 31, 34, 37, 40, 41, 42, 46, 47, 51, 52, 60, 61, 63, 67, 69, 70, 71, 72, 76, 78], "howard": 26, "howev": [1, 5, 7, 8, 20, 21, 28, 31, 
32, 34, 35, 37, 38, 40, 41, 42, 43, 46, 47, 48, 56, 57, 59, 60, 65, 67, 68, 71, 72], "http": [7, 8, 15, 23, 26, 70, 77], "hub": [5, 70], "hug": 67, "huggingfac": [15, 37, 42, 61], "husband": 11, "hyperparamet": 67, "hypothet": 63, "i": [0, 1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 15, 16, 19, 21, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78], "i32": 33, "ic": 61, "id": [7, 11, 76], "idea": [23, 43, 46, 51], "ideal": 23, "ident": [2, 43], "identifi": [1, 30], "idx": [7, 8, 57, 60], "ignor": [5, 10, 37], "illustr": [13, 37, 63, 67], "imag": [1, 4, 13, 14, 21, 26, 37, 38, 39, 49, 59, 61, 70, 72, 73, 76, 77, 78], "image_arrai": 24, "image_input": [1, 7, 8, 20, 24, 26], "imageclassifi": [26, 78], "imagefeaturetyp": [34, 76], "imagefilteringmodel": 24, "imagenet": [8, 26, 28, 49, 59, 70], "imagenet1k_v2": 41, "imagenet_slim_label": 7, "imagenetlabel": [8, 26], "imagesegment": [5, 78], "imagetyp": [1, 7, 8, 13, 18, 20, 26, 31, 32, 38, 70, 72, 76, 77], "img": [7, 8, 24, 34, 72], "img_as_np_arrai": 34, "img_np": [7, 8, 34, 72], "img_path": 8, "img_tf": 7, "img_torch": 8, "impact": [46, 51, 71], "implement": [2, 6, 18, 20, 40, 42, 43, 47, 51, 57, 62], "impli": [3, 53], "implicit": [2, 33], "implicitli": 67, "import": [1, 5, 6, 7, 8, 13, 15, 16, 17, 18, 19, 20, 21, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 40, 41, 42, 47, 52, 57, 60, 61, 63, 65, 66, 67, 69, 70, 71, 72, 75, 76, 77, 78, 79], "import_graph_def": [7, 69, 72], "impos": [53, 57], "improv": [2, 16, 20, 23, 26, 34, 39, 41, 42, 53, 54, 58, 60, 63, 67], "imshow": 72, "in_channel": [21, 35], "incept": 7, "inception_preprocess": 7, "inception_v1": 7, "inception_v1_2016_08_28_frozen": 7, "inception_v1_url": 7, "inceptionv1": 7, "inceptionv1_logits_predictions_softmax": 7, "includ": [0, 2, 5, 7, 8, 10, 20, 23, 24, 25, 26, 28, 30, 34, 35, 37, 38, 39, 53, 65, 68, 73], "incorpor": [2, 8, 26], "incorrect": 40, "increas": [42, 49, 59, 60, 61, 63], "incremenet": 52, "increment": [52, 67], "inde": 72, "independ": [37, 41, 42, 71], "index": [0, 37, 48], "index_typ": [75, 77], "indic": [3, 8, 23, 46, 48, 53, 72, 77], "individu": [54, 75], "ineffici": 24, "infer": [6, 19, 20, 28, 37, 39, 40, 41, 42, 43, 54, 56, 57, 58, 59, 60, 61, 64, 67, 69, 71], "infin": 21, "influenc": 57, "info": [31, 46, 51], "inform": [1, 5, 6, 7, 8, 15, 19, 20, 21, 23, 25, 26, 27, 28, 29, 30, 32, 33, 34, 39, 40, 42, 46, 47, 52, 66, 69, 76, 78, 79], "infrequ": 21, "ingredi": 43, "inherit": [5, 10, 67], "initi": [6, 10, 11, 21, 24, 34, 41, 47, 49, 57, 67, 69, 70], "initial_spars": 60, "initializer_nod": 69, "inject": 40, "innerproduct": 76, "inp": [20, 30, 52], "inplac": [41, 47, 52, 57, 60], "input": [1, 2, 3, 4, 11, 13, 15, 19, 22, 26, 27, 28, 31, 33, 35, 36, 37, 39, 41, 42, 46, 47, 51, 52, 56, 57, 61, 62, 63, 67, 70, 71, 72, 73, 75, 76, 77, 78, 79], "input_1": [21, 24, 26, 30], "input_2": [21, 30], "input_adpated_model_1": 37, "input_adpated_model_2": 37, "input_batch": 5, "input_binari": 69, "input_cach": [57, 61], "input_checkpoint": 69, "input_data": 30, "input_descript": [26, 29], "input_dict": 6, "input_graph": 69, "input_id": [11, 67], "input_imag": 5, "input_lay": 15, "input_length": 6, "input_mask": 15, "input_nam": [20, 21, 24, 27, 30, 34, 75, 76, 77], "input_nod": 6, "input_ord": 19, "input_sampl": 61, "input_sav": 69, "input_shap": [15, 21, 26, 28, 34, 70, 76], "input_signatur": 70, "input_spec": [19, 33, 67], "input_tensor": 
[5, 69], "input_typ": 19, "input_valu": 15, "input_word": 15, "inputlay": 70, "inputspec": 19, "insert": [34, 40, 46, 47, 52, 57, 77], "insid": [2, 6, 10, 35, 46], "insight": 30, "inspect": [6, 16, 30, 67, 69, 76], "inspect_input": 6, "inspect_input_featur": 76, "inspect_lay": 76, "inspect_loss_lay": 76, "inspect_optim": 76, "inspect_tf_output": 6, "inspect_updatable_lay": 76, "instabl": 40, "instal": [0, 5, 6, 7, 8, 10, 11, 26, 31, 39, 77], "instanc": [3, 24, 29, 30, 41, 43, 63, 67], "instanti": [10, 24, 31, 34, 35, 36], "instantli": 63, "instead": [7, 24, 30, 33, 34, 35, 39, 41, 42, 57, 71, 75], "instruct": [0, 11, 19, 20, 25, 33, 34, 39, 57, 73], "int": [24, 35, 40, 78], "int32": [10, 11, 15, 19, 32, 67], "int4": [42, 43], "int8": [40, 41, 42, 43, 47, 57, 58, 59, 60, 62, 63], "integ": [10, 32, 40, 42, 56, 58], "integr": [19, 25, 26, 34, 37, 39, 61, 63, 64], "intens": [46, 63], "intent": 35, "interact": 34, "interfac": [19, 24, 26, 29, 37, 61], "intermedi": [0, 2, 3, 13, 16, 19, 20, 58, 63, 65, 67, 71], "intern": [2, 20, 39, 67], "interpol": 58, "interv": [7, 21, 24, 26, 70], "intinputtyp": 19, "introduc": [2, 12, 13, 19, 20, 40, 52, 54, 57, 67], "introduct": [13, 35, 43, 52], "invalid": 75, "inverse_dist": [75, 77], "investig": 23, "invoc": 63, "invok": [3, 8, 24, 31, 34, 35, 36, 37, 40, 47, 61, 69], "involv": [6, 46, 61, 63, 67], "io": [13, 16, 20, 21, 24, 26, 38, 39, 42, 49, 54, 59, 61, 68, 69], "ios11": 68, "ios14": [16, 17, 68], "ios15": [16, 17, 24, 28, 32, 38, 68], "ios16": [24, 32, 48, 53, 62], "ios17": [40, 49, 54, 57, 62], "ios18": [37, 40, 42, 43, 47, 48, 49, 53, 59, 60, 62, 63, 67], "ipad": 61, "iphon": [30, 42, 43, 49, 54, 57, 58, 59, 60, 61], "ipython": [24, 75], "ir": [2, 20], "irrelev": 2, "irrespect": 71, "is_custom_op": 19, "is_symbol": 19, "is_updat": 75, "isinst": 26, "isn": [61, 67], "issu": [13, 20, 31], "isupdat": 77, "italic": 42, "item": [30, 35], "iter": [30, 41, 47, 52, 67], "its": [1, 2, 8, 10, 11, 13, 19, 24, 25, 26, 28, 29, 30, 31, 34, 36, 37, 41, 43, 51, 53, 54, 59, 61, 62, 64, 67, 69, 71, 72, 75], "itself": [2, 42, 71], "j": 78, "java": 29, "jit": [5, 8, 10, 13, 21, 24, 28, 31, 34, 36, 37, 41, 47, 57, 63, 67], "job": 25, "john": [11, 29], "join": [7, 69], "joint": [40, 43], "joint_compress": 41, "joint_compressed_mlmodel": 40, "joint_compressed_model": [40, 41], "jointli": [51, 54], "jpeg": [24, 34], "jpg": [1, 5, 7, 8, 24, 26, 34, 72], "json": [5, 78], "just": [2, 3, 6, 20, 35, 40, 43, 47, 49, 57, 63, 67, 72], "k": [7, 18, 19, 31, 42, 49, 62, 63, 65, 67, 75, 77], "k_cach": 67, "kd_tree": 75, "keep": [35, 36, 42, 43, 46, 64, 67], "keep_dim": 33, "keepdim": 10, "kei": [8, 20, 29, 30, 32, 39, 40, 46, 47, 67, 78], "kenlm": 6, "kept": 65, "kera": [3, 15, 18, 23, 24, 26, 28, 39, 68, 76], "keras_convert": 76, "keras_model": [1, 24, 26, 76], "keras_model_path": 76, "keras_url": 76, "keraslay": [15, 70], "kerasmnist": 76, "kernel": [43, 62], "kernel_s": [21, 35, 76], "keys_vector": 8, "kick": 47, "kind": [1, 2, 43, 51], "kmean": [30, 47, 61, 63, 65], "kmeans_lut": 65, "knearestneighborsclassifi": 75, "knearestneighborsclassifierbuild": [75, 77], "knn": 75, "knn_builder": 77, "knn_spec": 77, "knob": 47, "know": [3, 21, 26, 30, 33, 51, 69, 72], "knowledg": 42, "known": [19, 51, 56, 58, 59, 61, 68], "kv_cache_st": 67, "kvcache_shap": 67, "kwarg": [19, 65], "l2": [51, 56], "label": [1, 5, 7, 26, 41, 52, 57, 60, 75, 76, 77, 78], "label_fil": 7, "label_url": [8, 26], "labels_json": [5, 78], "lack": [3, 69], "lambada_openai": 42, "lambda": 47, "lanczo": [7, 8], 
"landscap": 61, "languag": [0, 2, 3, 6, 12, 13, 16, 19, 20, 21, 28, 29, 51, 52, 61, 67], "larg": [18, 20, 21, 30, 34, 37, 42, 43, 46, 48, 51, 58, 61, 63, 67, 71], "large_weight": 30, "larger": [30, 42, 46, 54, 57], "largest": 61, "last": [25, 30, 35, 41, 57, 75, 76, 77], "last_weight_nam": 30, "latenc": [20, 34, 39, 41, 43, 49, 51, 53, 54, 58, 59, 63, 64], "latent": 61, "later": [21, 62, 63, 67, 76], "latest": [20, 25], "launch": [5, 15, 26, 78], "layer": [2, 6, 15, 20, 30, 31, 33, 35, 36, 37, 40, 42, 51, 52, 53, 56, 57, 61, 67, 68, 70, 71, 76, 77], "layer1": 36, "layer2": 36, "layerwis": 56, "layerwise_compress": [56, 61, 62, 63], "layerwisecompressor": [51, 52, 56, 57, 61], "layerwisecompressorconfig": [52, 57, 61], "lead": [40, 42, 43, 46, 48, 49, 51, 59, 63], "leaf": 75, "learn": [0, 2, 3, 5, 8, 13, 18, 24, 26, 29, 30, 37, 38, 39, 40, 43, 46, 51, 57, 60, 64, 68, 70, 71, 73, 78], "learningr": 76, "learnt": [42, 57], "least": 32, "leav": 67, "left": [2, 5, 26, 78], "leftmost": 1, "len": [7, 8, 26, 69, 72], "length": [6, 11, 21, 42, 48], "less": [20, 30, 34, 35, 39, 40, 42, 43, 51, 60, 64, 65], "let": [7, 8, 13, 24, 34, 42, 43, 47, 51, 57, 60, 61, 65, 67, 75, 76], "level": [2, 6, 8, 29, 33, 35, 40, 43, 47, 51, 52, 53, 54, 57, 61, 63, 70, 72], "leverag": [43, 58, 60, 64], "levl": 51, "liang": 26, "librari": [0, 2, 3, 7, 13, 25, 72], "libsvm": [0, 18, 64, 73], "libsvm_model": 27, "licens": [23, 26, 29, 75, 76, 77], "lie": 7, "life": 11, "lift": 6, "like": [2, 5, 6, 8, 18, 26, 34, 35, 40, 49, 51, 57, 63, 67, 76], "limit": [2, 5, 13, 21, 28, 40, 42, 57, 68], "line": [2, 13, 25, 31, 63, 76], "linear": [0, 2, 31, 37, 40, 42, 43, 47, 52, 53, 54, 57, 58, 63, 64, 65, 67, 75, 77], "linear1": 37, "linear2": 37, "linear_2": 40, "linear_config": [47, 52], "linear_model": [29, 66], "linear_quantize_activ": [56, 57, 60, 62, 63], "linear_quantize_weight": [41, 42, 56, 57, 62, 63], "linear_symmetr": [41, 57, 63, 65], "linear_weight_quantize_config": 41, "linearli": [54, 57, 58], "linearquant": [41, 56, 57, 60, 62], "linearquantizerconfig": [41, 57, 60], "linearregress": [29, 66], "linearsymmetr": 65, "link": [25, 34, 46, 49, 51, 54, 59, 72], "linux": [16, 25, 34, 73], "list": [8, 10, 11, 19, 23, 29, 30, 33, 39, 40, 52, 57, 60, 62, 63, 65, 67, 68, 69, 70, 76], "listinputtyp": 19, "literatur": 49, "littl": [42, 61], "ll": [36, 61], "llama": 11, "lm": 67, "lm_logit": 42, "load": [1, 3, 6, 9, 13, 15, 16, 18, 19, 20, 21, 24, 30, 33, 34, 37, 38, 41, 42, 47, 49, 54, 59, 65, 66, 69, 70, 71, 72, 78], "load_data_it": 61, "load_dataset": 42, "load_from_path": 30, "load_imag": 34, "load_image_as_numpy_arrai": 34, "load_model": [70, 76], "load_spec": [21, 29, 76], "load_state_dict": 67, "loaded_model": 26, "local": 7, "locat": [34, 53, 57], "log": [23, 25, 33], "log10": 72, "log_epsilon_0": 33, "logic": 35, "logit": [6, 7, 11, 41, 42, 67], "logits_sequ": 6, "logsoftmax": 41, "long": [6, 20, 34, 72], "longer": [20, 21, 31, 39, 54, 68, 72], "look": [3, 13, 16, 41, 42, 60, 61, 63, 67, 72], "lookup": [40, 41, 43, 46, 47, 48, 61, 63, 65], "loop": [6, 10, 13, 35, 36, 47, 57], "loop_bodi": 35, "loop_count": 35, "lora_alpha": 37, "lora_config": 37, "loraconfig": 37, "lose": [43, 60, 63, 65], "loss": [41, 42, 46, 47, 51, 52, 57, 60, 61, 63, 65, 76], "loss_fct": 42, "loss_fn": [42, 47, 57], "loss_funct": 63, "losslay": 76, "lost": [56, 61], "lot": [40, 51, 54], "love": 11, "low": [37, 40, 46, 70], "lower": [7, 42, 43, 46, 48, 63, 65], "lower_bound": [21, 67], "lowest": [51, 52, 53], "lr": [41, 76], "lstm": 6, "lut": [40, 41, 
46, 47, 49, 60, 61, 62, 63], "lut1": 40, "lut2": 40, "lut_1_param": 40, "lut_2_param": 40, "lut_dtyp": [41, 47], "m": [25, 42, 49, 51, 52, 53, 54, 59, 61, 67], "m1": 34, "m3": 67, "m4": [41, 43, 58, 59, 60, 62], "mac": [5, 15, 25, 26, 42, 43, 78], "macbook": [34, 67], "machin": [2, 3, 26, 29, 37, 64, 71], "maco": [10, 11, 13, 16, 24, 26, 38, 39, 42, 61, 64, 68, 73], "macos10": 68, "macos12": [16, 17, 24, 28, 32, 38, 68], "macos13": [24, 32, 40, 48, 53, 62], "macos14": [49, 62], "macos15": [37, 41, 43, 47, 48, 49, 53, 59, 62, 67], "macosx_10_12_intel": 25, "made": [26, 34, 42, 54], "magnitud": [18, 41, 51, 52, 53, 62], "magnitudeprun": [41, 52, 54, 60, 62], "magnitudeprunerconfig": [41, 52, 60], "magniutdeprun": 52, "magniutdeprunerconfig": 52, "mai": [2, 3, 7, 8, 13, 19, 20, 21, 23, 24, 26, 28, 30, 34, 37, 38, 41, 42, 43, 44, 49, 51, 54, 56, 57, 58, 59, 60, 62, 63, 65, 67, 71, 72], "main": [2, 30, 33, 35, 37, 61], "main_1": 30, "main_2": 30, "mainfunct": 30, "mainli": 63, "maintain": [20, 43, 51], "mainten": 68, "major": [2, 10, 20, 68, 71], "make": [1, 5, 6, 18, 20, 21, 23, 24, 27, 28, 29, 30, 31, 33, 34, 35, 36, 43, 52, 64, 67], "make_st": 67, "makedir": 7, "man": [11, 78], "manag": [6, 24, 25], "mandatori": 19, "manhattan": 10, "mani": [2, 13, 37, 63], "manipul": 34, "manner": [10, 11, 30, 42, 51, 52, 53, 57, 62, 67], "manual": 29, "map": [30, 34, 49, 58, 72], "margin": 60, "mari": 11, "mark": [21, 26, 42, 76], "mask": [5, 51, 53], "massiv": [42, 51, 52], "master": [23, 25, 26], "match": [2, 6, 7, 19, 30, 34, 40, 61, 65, 67, 71, 72], "materi": 64, "mathemat": [2, 3, 58], "matmul": [3, 69, 70], "matplotlib": [7, 72], "matric": [42, 46, 47, 48, 53], "matrix": [3, 42, 46, 48, 51], "matter": 43, "max": [7, 11, 31, 34, 58, 67, 72, 76], "max_error": 30, "max_pool2d": 36, "max_seq_len": 67, "max_seq_length": 15, "max_sequence_length": 11, "max_signal_energi": 72, "max_time_step": 6, "maxim": 71, "maximum": [5, 21, 47], "maximum_sequence_length": 15, "maxpooling2d": 76, "mb": [3, 19, 33, 60, 61, 67], "md": [22, 23, 25], "mean": [5, 6, 8, 24, 30, 35, 40, 41, 42, 49, 51, 57, 59, 61, 62, 63, 65, 69, 71, 76, 77], "meaning": 51, "measur": [34, 42, 57, 60, 61], "median": [49, 54, 59], "meet": [51, 60], "mel": 6, "member": [19, 23], "memori": [2, 6, 13, 21, 26, 34, 41, 42, 43, 46, 49, 53, 54, 58, 59, 63, 64], "menglong": 26, "mention": [49, 52, 54, 59, 62], "menu": 23, "merg": [25, 37], "merge_chunks_to_pipelin": 30, "messag": [2, 8, 26], "meta": 11, "metadata": [1, 15, 16, 18, 28, 76, 77, 78], "metadata_vers": 40, "metal": 2, "method": [0, 7, 8, 10, 11, 13, 16, 20, 24, 25, 26, 28, 29, 30, 32, 34, 40, 41, 42, 44, 47, 49, 52, 56, 57, 60, 62, 63, 64, 67, 68, 69, 70, 71, 72, 73, 75, 76], "metric": [30, 49, 59, 63, 72, 76], "mfcc": 6, "mid": 61, "mid_block": 61, "might": 10, "mil": [16, 20, 22, 26, 32, 40, 68], "mileston": [47, 57, 60], "million": 10, "millisecond": 43, "milspec": 68, "min": [11, 31, 42, 58, 76], "mind": 46, "minibatchs": 76, "miniconda": [7, 25, 72], "minim": [29, 51, 56, 63, 64], "minimum": [2, 16, 21, 38, 43, 47, 67, 71], "minimum_conv_kernel_channel": 65, "minimum_conv_weight_count": 65, "minimum_deployment_target": [16, 17, 24, 28, 32, 37, 38, 40, 41, 47, 57, 63, 67, 68], "minor": 61, "mint": 61, "minut": [34, 42, 43, 60, 63], "mismatch": 71, "miss": [20, 24, 69], "mistral": 67, "mistral7b": 67, "mit": [75, 76, 77], "mix": [10, 71], "mkdtemp": 69, "ml": [1, 3, 4, 5, 6, 9, 12, 14, 15, 17, 19, 22, 23, 27, 28, 30, 31, 32, 34, 35, 36, 37, 40, 42, 43, 48, 49, 51, 53, 56, 58, 60, 61, 65, 
66, 69, 70, 74, 75, 77, 78, 79], "ml_model": 30, "mlcompiledmodel": 30, "mlcomputeplan": 30, "mlcomputeunit": [28, 71], "mlcustomlay": 19, "mlfeaturetyp": 34, "mlfeaturevalu": 34, "mlmodel": [2, 3, 5, 6, 7, 8, 10, 11, 15, 16, 20, 21, 23, 24, 26, 27, 28, 31, 32, 33, 34, 35, 37, 39, 40, 41, 47, 61, 63, 65, 66, 67, 68, 69, 70, 75, 76, 77, 78, 79], "mlmodel_1": 37, "mlmodel_2": 37, "mlmodel_compress": 63, "mlmodel_compressed_activ": 63, "mlmodel_from_export": [5, 8], "mlmodel_from_trac": [5, 8], "mlmodel_palett": 41, "mlmodel_palettized_with_8bit_lut": 41, "mlmodel_prun": 41, "mlmodel_pruned_palett": 41, "mlmodel_pruned_quant": 41, "mlmodel_updat": [75, 76], "mlmodel_updatable_path": [75, 76], "mlmodel_url": 76, "mlmodelasset": 30, "mlmodelc": [2, 30, 34], "mlmodelconfigur": [28, 34], "mlmultiarrai": [8, 18, 32, 34, 38, 69, 70], "mlpackag": [2, 5, 7, 8, 13, 15, 16, 20, 21, 24, 26, 30, 34, 37, 43, 57, 60, 61, 62, 63, 68, 69, 70, 78], "mlpackg": 34, "mlprogram": [2, 16, 21, 24, 28, 30, 32, 37, 38, 48, 53, 57, 62, 65, 67, 68, 69, 70], "mlupdatetask": 74, "mnist": [47, 63, 76], "mnistdigitclassifi": 76, "mobil": [8, 11], "mobilebert": [13, 31], "mobilenet": [8, 13, 24, 26, 28, 31, 69, 70], "mobilenet_v1_1": 28, "mobilenet_v2": [8, 13, 28], "mobilenet_v2_050_192": 70, "mobilenet_v2_1": [28, 69], "mobilenetv2": [13, 18, 24, 26, 46, 49, 51, 54, 56, 59, 69], "mobilenetv3": [46, 49, 51, 54], "mobilevitv2": [49, 56, 59], "mod": 47, "mode": [20, 30, 40, 41, 42, 43, 47, 48, 49, 51, 57, 58, 59, 60, 61, 62, 63, 65, 68], "model": [0, 4, 12, 14, 22, 23, 27, 29, 38, 39, 43, 44, 46, 48, 51, 53, 54, 56, 58, 62, 64, 65, 66, 68, 71, 73, 75, 79], "model_4bit_palettized_with_8bit_quantized_lut": 41, "model_8bit": 65, "model_compress": 52, "model_config": 63, "model_dir": 69, "model_expected_input_shap": 34, "model_fp16": 65, "model_fp32": [65, 72], "model_from_export": 13, "model_from_tf": [19, 28], "model_from_torch": 28, "model_from_trac": 13, "model_input_nam": 72, "model_output_nam": 72, "model_path": 30, "model_pb2": 77, "model_spec": [30, 76], "model_structur": 30, "model_torch_pruned_and_palett": 41, "model_torch_pruned_and_quant": 41, "model_util": 47, "model_with_lut_weight": 40, "model_with_quantized_weight": 40, "model_with_sparse_weight": 40, "modern": 61, "modif": 61, "modifi": [29, 30, 34, 63, 67], "modul": [5, 10, 13, 21, 24, 31, 35, 36, 37, 40, 47, 49, 51, 52, 61, 67, 75], "module_name_config": [47, 57], "module_type_config": [47, 52, 57], "moduledkmpalettizerconfig": 60, "modulelinearquantizerconfig": [41, 57, 60], "modulemagnitudeprunerconfig": [41, 60], "moduleposttrainingpalettizerconfig": [41, 61], "modulesparsegptconfig": 61, "moment": 11, "momentum": 76, "monterei": [10, 11], "moon": 24, "more": [1, 2, 5, 6, 7, 8, 10, 11, 16, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 35, 37, 40, 41, 42, 43, 46, 47, 49, 51, 52, 53, 54, 57, 58, 59, 60, 63, 65, 66, 67, 68, 71, 72, 76, 78, 79], "most": [2, 3, 6, 25, 26, 28, 33, 34, 37, 39, 40, 42, 43, 46, 47, 56, 61, 63, 70, 71, 72, 75], "mostli": 59, "motorbik": [5, 78], "mountain": 61, "move": [13, 31, 39, 46, 49, 59, 60, 62, 63], "movement": 43, "mpsgraph": 20, "msg": 19, "much": [23, 28, 34, 37, 42, 43, 51, 60, 61, 63, 67, 76], "mul": 3, "multi": [18, 19, 67], "multiarrai": [20, 30, 32, 34, 78], "multiarraytyp": [76, 77], "multidimension": [24, 32, 70], "multifunct": [0, 39], "multifunctiondescriptor": 37, "multipl": [3, 21, 22, 33, 37, 42, 48, 51, 54, 63, 78], "multipli": [7, 51], "must": [2, 8, 19, 21, 23, 24, 32, 41, 67, 76], "my": [15, 20], 
"my_input_nam": 32, "my_model": [16, 27, 30, 79], "my_model_chunk1": 30, "my_model_chunk2": 30, "my_model_chunked_pipelin": 30, "my_output_nam": 32, "my_updated_model": 30, "mylayerselector": 65, "mymodel": [20, 21, 30], "mymodel_upd": 21, "n": [6, 7, 40, 46, 48, 49, 51, 52, 53, 54, 56, 59, 69, 72], "n_bit": [40, 41, 42, 47, 60, 61, 63], "n_class": 41, "n_m_ratio": 52, "n_step": 6, "name": [1, 2, 3, 5, 6, 7, 8, 10, 11, 15, 19, 21, 24, 25, 26, 27, 28, 29, 30, 31, 33, 37, 40, 46, 47, 49, 51, 52, 54, 56, 57, 59, 65, 67, 69, 72, 76, 77, 79], "namespac": 62, "nativ": [40, 71], "natur": 61, "navig": [5, 26], "nbit": [30, 41, 43, 47, 61, 63, 65], "ne": [20, 28, 34, 38, 42, 43, 57, 59, 60, 62, 64, 71, 72], "nearest": [18, 42, 56, 57, 74], "nearest_neighbor": [75, 77], "necessari": [21, 31, 61, 67], "need": [3, 6, 7, 8, 16, 19, 20, 21, 23, 24, 25, 26, 28, 30, 31, 33, 34, 35, 36, 39, 40, 41, 42, 43, 47, 52, 54, 57, 59, 60, 61, 63, 64, 67, 69, 71, 72, 76, 78], "neighbor": [18, 74], "neither": 38, "net": 26, "netron": 39, "network": [0, 4, 6, 7, 8, 9, 14, 16, 18, 19, 24, 28, 31, 33, 34, 35, 36, 37, 39, 47, 54, 56, 58, 59, 60, 64, 68, 69, 70, 72, 73, 74, 77, 78], "neural": [0, 4, 6, 7, 8, 9, 14, 16, 18, 19, 24, 28, 30, 33, 34, 35, 39, 41, 42, 43, 49, 54, 56, 57, 58, 59, 60, 63, 64, 68, 69, 70, 72, 73, 74, 77, 78], "neural_network": [21, 62, 65, 76], "neuralnetwork": [2, 17, 21, 26, 28, 29, 30, 62, 65, 68, 71], "neuralnetwork_spec": 76, "neuralnetworkbuild": 76, "neuralnetworklay": 65, "neuralnetworkshap": 39, "never": 57, "new": [0, 2, 3, 4, 6, 7, 10, 13, 19, 20, 23, 34, 37, 40, 43, 47, 63, 64, 67, 68, 69, 72], "new_feature_nam": 30, "new_state_c": 6, "new_state_h": 6, "newer": [2, 5, 10, 11, 13, 17, 20, 25, 26, 28, 31, 32, 39, 41, 43, 47, 48, 53, 54, 58, 59, 60, 68, 71, 72, 78], "newest": [20, 23, 25, 26, 38, 39], "newli": [6, 7, 13, 26, 31, 40, 67, 72], "newly_computed_k": 67, "newly_computed_v": 67, "newmodel_from_export": 13, "newmodel_from_trac": 13, "next": [8, 10, 11, 34, 37, 42, 51, 56, 57, 60, 61, 67], "next_token_predictor": 10, "nll_loss": [41, 47], "nn": [5, 10, 13, 21, 24, 31, 35, 36, 37, 41, 42, 47, 57, 67, 70], "no_grad": [5, 57, 60], "node": [2, 3, 19], "nois": 72, "noise_var": 72, "non": [21, 26, 34, 40, 41, 47, 48, 52, 53, 57, 62, 67, 76], "none": [5, 6, 10, 21, 25, 30, 34, 41, 47, 52, 57, 69, 72, 75], "nop": 7, "nor": 38, "norm": [13, 51, 52, 56, 57, 60], "normal": [8, 24, 26, 28, 47, 48, 57, 70], "notat": 3, "note": [20, 21, 28, 33, 40, 41, 42, 49, 51, 54, 57, 59, 60, 62, 67], "notebook": 47, "noth": [30, 31, 63], "noutput": 7, "now": [5, 6, 7, 8, 10, 11, 13, 20, 26, 30, 34, 38, 40, 41, 42, 47, 60, 67, 68, 71, 75, 77], "np": [6, 7, 8, 10, 11, 15, 21, 24, 30, 32, 33, 34, 37, 67, 69, 70, 72], "nprospect": 69, "nsampl": 42, "null": [6, 47, 57], "num_channel": 35, "num_epoch": [52, 60], "num_error": 30, "num_iter": 67, "num_kmeans_work": 42, "num_palettization_epoch": 47, "number": [6, 10, 18, 21, 29, 30, 37, 42, 47, 48, 49, 54, 58, 59, 60, 61, 65, 76], "number_of_dimens": [75, 77], "number_of_neighbor": 75, "number_of_neighbors_allowed_rang": 75, "number_of_neighbors_allowed_set": 75, "numberofneighbor": 75, "numer": [2, 7, 30, 40, 42, 44, 67, 69, 71], "numpi": [5, 7, 8, 10, 11, 15, 21, 24, 26, 30, 32, 33, 34, 37, 67, 69, 70, 72], "o": [7, 23, 31, 43, 44, 47, 60, 61, 62, 69], "object": [2, 3, 5, 6, 10, 16, 18, 20, 21, 26, 28, 29, 30, 31, 34, 36, 46, 47, 49, 60, 68, 69, 70], "observ": [1, 42, 43, 57, 58, 59, 60], "obtain": [6, 28, 42, 49, 51, 56, 57], "occupi": [20, 65], "occur": [3, 
34, 54, 75], "off": [6, 42, 43, 47, 57, 60, 63, 72, 76, 77], "offer": [6, 13, 16, 20, 30, 39, 43, 51], "offic": 61, "offici": 20, "offset": [42, 58, 62, 63], "often": [2, 26, 42, 63], "oil": 61, "old": [7, 20, 72], "old_feature_nam": 30, "older": [16, 17, 20, 26, 28, 68], "omit": [21, 47, 67], "onc": [8, 11, 23, 30, 35, 41, 42, 43, 47, 51, 56, 57, 60, 61, 67, 69], "one": [2, 6, 8, 10, 11, 20, 21, 24, 25, 29, 31, 33, 34, 36, 37, 43, 47, 51, 59, 60, 61, 62, 65, 67, 70, 71, 76], "onecomponent16half": 24, "ones": [6, 13, 31, 47, 63, 69], "onli": [2, 3, 5, 6, 19, 20, 23, 24, 26, 28, 30, 31, 34, 35, 36, 37, 40, 41, 42, 43, 46, 47, 49, 52, 53, 56, 57, 58, 59, 60, 61, 62, 63, 67, 68, 71, 72], "onnx": [12, 13, 20, 23, 39], "onnx_coreml": 39, "onward": 57, "op": [2, 7, 13, 19, 22, 26, 30, 31, 33, 40, 47, 52, 54, 56, 65, 67, 68, 69, 71, 72], "op_config": [41, 47, 52, 57, 61, 63], "op_name_config": [30, 47, 52], "op_typ": 30, "op_type_config": [30, 47, 52], "opactivationlinearquantizerconfig": [57, 63], "opaqu": 67, "open": [1, 7, 8, 12, 13, 15, 16, 18, 21, 23, 26, 30, 34, 39, 72], "openelm": [11, 13, 31], "oper": [2, 4, 6, 7, 8, 13, 20, 24, 25, 30, 31, 33, 36, 41, 42, 46, 47, 51, 57, 58, 65, 69, 70, 72, 73, 75, 77], "oplinearquantizerconfig": [41, 57, 63], "opmagnitudeprunerconfig": [41, 52], "oppalettizerconfig": [30, 41, 47, 61, 63], "opportun": 21, "opset": 2, "opt": [30, 44], "optforcausallm": 42, "opthresholdprunerconfig": 52, "optim": [7, 8, 10, 13, 19, 22, 28, 30, 32, 33, 34, 35, 38, 39, 40, 41, 43, 44, 46, 47, 49, 51, 52, 54, 56, 57, 58, 59, 64, 65, 67, 71, 76], "optimization_hint": [21, 34], "optimizationconfig": [30, 41, 47, 52, 57, 61, 63], "option": [0, 5, 16, 19, 20, 21, 22, 26, 30, 32, 33, 36, 40, 42, 43, 47, 52, 57, 59, 62, 73, 76], "orang": 23, "order": [7, 10, 19, 22, 26, 28, 30, 32, 33, 40, 44, 46, 47, 51, 60, 61, 68, 70], "org": [7, 8, 26, 70, 78], "origin": [2, 5, 7, 8, 10, 11, 24, 26, 29, 30, 35, 40, 48, 51, 56, 57, 63, 67, 68, 69, 72], "original_gdef": [7, 72], "other": [2, 6, 10, 13, 21, 23, 24, 25, 26, 34, 35, 36, 40, 42, 43, 44, 47, 53, 57, 59, 60, 65, 67, 69, 71, 73, 76], "otherwis": [35, 36, 38, 49, 51, 54, 59], "our": [15, 31, 36, 47, 60, 76], "out": [1, 5, 8, 20, 26, 29, 30, 39, 41, 43, 47, 51, 52, 54, 60, 62, 63, 64, 69, 75], "out_adpated_model_1": 37, "out_adpated_model_2": 37, "out_channel": [21, 35], "out_dict": [1, 24, 26, 34], "outer": [10, 58], "outlier": 42, "output": [1, 2, 4, 5, 6, 7, 10, 11, 13, 15, 18, 21, 22, 26, 27, 33, 35, 37, 39, 42, 47, 48, 51, 56, 57, 58, 61, 63, 67, 69, 70, 71, 73, 75, 76, 77, 78, 79], "output_1": 21, "output_2": 21, "output_descript": [26, 29], "output_dict": 34, "output_dir": 30, "output_graph": [6, 69], "output_id": 11, "output_nam": [20, 30, 69, 75, 76, 77], "output_node_nam": 69, "output_path": 77, "output_predict": 5, "output_strid": 78, "output_text": 11, "outsid": [2, 75], "over": [2, 3, 10, 13, 24, 30, 31, 35, 40, 42, 43, 49, 54, 60, 63, 67, 71, 72], "overal": [40, 71], "overhead": [32, 42], "overlai": 5, "overlaid": 5, "overrid": [16, 19, 28, 32, 38, 68], "overridden": 19, "overview": [3, 20, 22, 28, 30, 44, 45, 50, 55, 60, 63, 65], "own": [2, 18, 32, 58], "pack": [19, 53], "packag": [2, 6, 7, 8, 13, 21, 23, 24, 26, 29, 30, 37, 39, 64, 65, 69, 70, 73, 77], "pad": 35, "page": [0, 10, 20, 23, 25, 39, 43, 46, 47, 51, 63, 64, 65, 70, 71, 73, 78], "pair": 67, "palett": [0, 5, 18, 20, 30, 39, 43, 49, 51, 53, 54, 58, 62, 63], "palettization_config": [41, 47], "palettization_config_dict": 47, "palettization_scal": 40, 
"palettize_config": 61, "palettize_weight": [30, 41, 42, 46, 47, 60, 61, 62, 63], "palettized_coreml_model": 47, "palettized_model": [60, 63], "palettized_torch_model": [40, 47], "palettized_weight": 30, "palettizer_config": 41, "pallet": 60, "palm": 24, "palmtre": 24, "palmtrees_256_by_256": 24, "palmtrees_result": 24, "panda": [29, 66], "pane": [5, 15, 26, 78], "paper": [26, 52, 56], "paradigm": 56, "parallel": 42, "param": [5, 78], "paramet": [2, 5, 7, 8, 13, 16, 17, 18, 19, 20, 21, 22, 24, 26, 28, 29, 30, 32, 33, 34, 35, 37, 38, 40, 41, 42, 43, 46, 47, 48, 52, 57, 63, 65, 67, 68, 69, 73, 75, 76, 78], "parameter_nam": 40, "params_json": 78, "params_name_map": 30, "parent": 69, "park": 61, "pars": [29, 69], "parsefromstr": [7, 69, 72], "part": [10, 19, 35, 76, 77], "parti": [33, 40, 64], "partial": 10, "particular": [2, 3, 25, 37, 43, 60], "partit": 71, "pascal": 5, "pass": [2, 3, 4, 5, 7, 10, 19, 20, 24, 25, 26, 28, 30, 35, 36, 41, 42, 47, 51, 56, 57, 58, 60, 62, 67, 70, 72, 73], "pass_pipelin": [22, 40], "passag": 15, "passpipelin": 40, "past": [15, 23], "past_kv_len": 67, "path": [2, 7, 13, 20, 21, 28, 29, 30, 31, 34, 38, 41, 43, 57, 68, 69, 70, 71, 76], "pattern": [41, 51], "pb": [6, 7, 20, 28, 68, 69, 72], "pd": [29, 66], "peft": 37, "per": [5, 24, 40, 41, 43, 47, 49, 54, 56, 57, 58, 59, 60, 61, 62, 63], "per_block": [42, 57, 58], "per_channel": [42, 52, 57, 58], "per_grouped_channel": [42, 46, 47, 48, 49, 60, 61, 63], "per_kernel": 52, "per_scalar": 52, "per_tensor": [41, 42, 47, 48, 58], "percentag": [30, 42], "percentil": [51, 52], "perf_count": [34, 67], "perform": [2, 6, 8, 10, 11, 13, 19, 20, 25, 26, 28, 33, 34, 36, 40, 41, 42, 45, 46, 47, 50, 51, 52, 53, 55, 56, 57, 58, 60, 63, 64, 67, 70, 71, 72], "period": [13, 31], "perl": 29, "perm": 33, "permit": 21, "permut": 33, "perplex": 42, "persist": [34, 67], "person": [5, 26, 74, 78], "perturb": 46, "photo": 24, "photograph": 61, "pi": 70, "pick": [20, 28, 32, 40, 71], "pil": [1, 5, 7, 8, 24, 26, 34, 72], "pil_img": 34, "pillow": [7, 26, 72], "pip": [5, 6, 7, 8, 10, 11, 23, 25, 26, 31, 72], "pipelin": [2, 18, 20, 25, 26, 29, 30, 43, 47, 61, 63, 64, 66, 74], "pipeline_spec": 77, "pipelineclassifi": 77, "pixel": [5, 7, 8, 18, 21, 24], "place": [2, 19, 30, 34, 41, 46, 64, 67, 77], "placehold": [7, 21, 69], "platform": [26, 61], "pleas": [13, 20, 23, 24, 26, 31, 47, 54, 59], "plot": [5, 72], "plt": 72, "plu": [35, 43], "plug": 47, "png": 24, "point": [7, 20, 35, 37, 40, 41, 42, 46, 48, 56, 57, 58, 65, 72, 78], "polynomialdecayschedul": [52, 60], "pool_siz": 76, "pooled_output": 15, "poor": 42, "popular": 26, "portion": [2, 10, 71], "pose": 18, "poseestim": 78, "posefind": 78, "posenet": 78, "posenet_model": 78, "posenet_with_preview_typ": 78, "posenetmobilenet075s16fp16": 78, "posit": [2, 6, 21, 33, 67], "possibl": [3, 6, 19, 21, 23, 28, 31, 35, 36, 38, 43, 54, 59, 60, 75, 76, 77], "post": [6, 42, 43, 46, 49, 52, 57, 59, 60, 61, 64], "postprocess": 6, "posttrainingpalett": [41, 42, 46, 47, 60, 62, 63], "posttrainingpalettizerconfig": [41, 42, 47, 60, 61, 63], "posttrainingquant": [42, 56, 57, 62], "posttrainingquantizerconfig": [42, 57], "potenti": [34, 67], "pottedpl": [5, 78], "pow": 70, "power": [20, 39, 43, 64], "practic": [21, 26, 30, 32, 37, 46, 51, 61, 69, 72], "pre": [6, 7, 8, 13, 18, 20, 24, 26, 28, 43, 46, 49, 51, 56, 59, 63, 64, 72, 78], "precis": [1, 2, 7, 20, 30, 32, 43, 46, 48, 54, 56, 60, 61, 63, 65], "pred": 6, "predefin": 26, "predetermin": [18, 20, 32], "predict": [0, 1, 5, 6, 10, 11, 15, 20, 21, 24, 28, 30, 33, 37, 
49, 59, 63, 64, 69, 70, 71, 75, 76, 77, 78], "predict_with_coreml": 8, "predicted_feature_nam": 76, "predictedfeaturenam": 77, "predictedprobabilitiesnam": 77, "prediction_dict": 10, "prediction_model": 15, "prefer": 34, "prepar": [8, 41, 47, 52, 57, 60, 63], "prepare_qat_fx": 57, "preprocess": [1, 5, 13, 18, 26, 69], "preprocess_for_ev": 7, "presenc": 51, "present": [26, 28, 34, 43], "preserv": [30, 42, 54, 56, 63, 71], "pressur": 59, "pretrain": [5, 8, 13, 42, 60], "preview": [0, 5, 15, 24, 26, 29], "previou": [2, 5, 6, 7, 8, 10, 16, 21, 24, 25, 26, 28, 33, 34, 40, 41, 52, 59, 62, 67, 72, 75], "previous": [23, 26, 67], "previous_state_c": 6, "previous_state_h": 6, "price": [29, 66], "primari": [13, 43, 64], "primarili": [49, 54, 63], "print": [1, 3, 6, 7, 10, 11, 16, 20, 21, 26, 29, 30, 33, 34, 41, 67, 69, 72, 77], "print_funct": 7, "prior": [20, 24, 40, 43], "privat": 64, "pro": [34, 42, 43, 49, 54, 57, 58, 59, 60, 62, 67], "prob": 6, "probabl": [1, 6, 7, 11, 30, 35, 75, 76, 77], "problem": [23, 27, 51], "proce": 23, "process": [6, 7, 8, 13, 23, 26, 28, 30, 34, 37, 38, 39, 40, 42, 44, 46, 47, 53, 58, 61, 63, 64, 65, 67, 71, 75], "processor": 43, "produc": [2, 5, 6, 7, 10, 13, 16, 18, 24, 28, 30, 32, 33, 34, 35, 37, 38, 40, 41, 42, 51, 61, 62, 65, 67, 71, 72, 76], "product": 23, "prog": [33, 67], "program": [4, 5, 7, 8, 9, 13, 15, 17, 19, 20, 21, 28, 30, 32, 67, 68, 69, 70, 73], "programmat": [2, 26], "progress": [7, 23], "project": [2, 5, 7, 8, 25, 26], "prompt": [34, 42, 61], "promptli": 23, "pronounc": 42, "propag": [6, 31], "properli": [10, 23], "properti": [16, 19, 20, 30, 32, 65, 71, 75], "prospect": 69, "proto": [2, 29, 30, 68, 76, 77], "protobuf": [2, 20, 28, 29, 30, 33, 69], "provid": [1, 2, 5, 6, 8, 16, 17, 19, 20, 23, 24, 25, 26, 28, 30, 34, 42, 44, 46, 47, 49, 51, 52, 53, 58, 60, 63, 64, 65, 67, 68, 69, 70, 71, 76, 77, 78], "prune": [0, 18, 20, 39, 40, 41, 43, 53, 54, 58, 62, 63], "prune_config": [41, 61], "prune_weight": [41, 51, 52, 60, 62], "pruned_model": [41, 60], "pruned_quant_model": 41, "pruned_torch_model": 40, "pruner": [41, 52, 60, 61], "pruning_config": 41, "pruning_schedul": 60, "pt": [28, 36, 68], "ptp_config": 61, "ptq": 56, "public": [26, 29, 78], "publicli": 23, "pull": 23, "purpos": [21, 30, 61], "put": 19, "putalpha": 5, "putpalett": 5, "py": [6, 7, 26, 75], "pyplot": 72, "python": [2, 6, 7, 8, 18, 19, 20, 24, 26, 29, 33, 39, 47, 61, 64, 67, 69, 72, 73], "python_coreml_stable_diffus": 61, "pytorch": [0, 2, 16, 19, 21, 23, 25, 30, 31, 33, 34, 35, 36, 38, 42, 43, 49, 52, 60, 61, 62, 63, 64, 73, 78], "q": [31, 67], "q_len": 67, "qa": [15, 78], "qat": [56, 63], "qint": 40, "qint8": 41, "qualifi": 52, "qualiti": 42, "quant_config": 41, "quant_finalized_model": 41, "quant_max": 47, "quant_min": 47, "quant_model": 41, "quantiti": 43, "quantiz": [0, 18, 23, 30, 39, 43, 46, 47, 51, 52, 53, 54, 59, 61, 62, 63], "quantization_granular": 52, "quantization_mod": 65, "quantization_n_bit": 40, "quantization_scal": 40, "quantization_schem": [41, 52, 57, 60], "quantization_util": [62, 65], "quantizationgranular": 58, "quantize_activ": 47, "quantize_spec_weight": 39, "quantize_weight": [39, 65], "quantized_model": [57, 60, 65], "quantized_torch_model": 40, "quantizedlayerselector": 65, "queri": [20, 67], "query_length": 67, "question": [15, 20, 23, 42], "quick": [34, 43, 63, 77], "quickdraw": 77, "quicker": 60, "quickest": 60, "quickli": [43, 51], "quickstart": 24, "quint": 40, "quint8": 57, "quit": 60, "r": [5, 6, 37, 60], "rais": [19, 21, 75], "rand": [8, 13, 21, 24, 28, 
31, 33, 34, 36, 37, 41, 57, 69, 70], "randint": [10, 41, 67], "randn": [35, 41], "random": [5, 8, 10, 13, 21, 24, 33, 34, 37, 41, 69, 70], "random_norm": 70, "random_token": 10, "randomli": [31, 36], "rang": [5, 8, 10, 11, 18, 20, 24, 35, 41, 47, 52, 56, 58, 60, 67, 72, 75], "range_shap": 21, "rangedim": [10, 21, 31, 67], "rank": [8, 37, 40], "rather": [20, 24, 26, 33, 34, 53], "ratio": [30, 42, 43, 46, 49, 51, 52, 54, 56, 59, 72], "raw": [6, 8, 25], "rb": [7, 30, 69, 72], "re": [5, 21, 30, 35, 67], "reach": 11, "read": [7, 8, 26, 30, 40, 42, 43, 64, 67, 69, 72], "read_csv": [29, 66], "read_stat": 67, "readabl": 29, "readi": [24, 34], "readlin": 7, "readm": 25, "real": 43, "realist": 61, "realiz": 40, "realli": 43, "reason": [8, 13, 24, 37, 43], "receiv": 68, "recent": [25, 39, 75], "recip": [24, 49], "reclaim": 61, "recognit": [6, 18, 69], "recommend": [12, 13, 20, 25, 31, 39, 40, 43, 57, 59, 63, 68, 69, 72], "reconstruct": 51, "record": 40, "recov": [46, 56], "red": [23, 24], "red_bia": 24, "reduc": [18, 20, 32, 33, 39, 42, 43, 49, 53, 54, 57, 58, 59, 60, 61, 64, 65, 71], "reduce_axes_0": 33, "reduce_keep_dims_0": 33, "reduce_mean": 33, "reduce_sum": 3, "reduct": [42, 51], "refer": [2, 3, 22, 24, 26, 27, 30, 32, 33, 41, 43, 47, 48, 52, 53, 54, 57, 58, 59, 61, 64, 65, 66, 67, 79], "refin": 61, "regain": [60, 61, 63], "regex": 52, "regim": 42, "regist": 40, "register_buff": [40, 67], "register_op": 19, "register_tf_op": [3, 19], "register_torch_op": 3, "registr": 19, "regnet_y_128fg": 34, "regnet_y_128gf": 34, "regress": [30, 37, 67], "regressor": [18, 37, 66], "regularli": 3, "rel": [34, 42], "relat": [6, 40], "releas": [23, 25, 62, 67, 72], "relev": 30, "reliabl": [25, 42], "reload": 30, "relu": [33, 35, 36, 37, 70, 76], "remain": [6, 34, 52], "remov": [6, 8, 26, 64, 77], "renam": [7, 18, 24, 32], "rename_featur": [20, 24, 30, 32], "render": [61, 77], "repeat": [1, 10, 40], "replac": [41, 52, 69], "replace_custom_layer_nam": 39, "repo": [20, 22, 25, 61], "report": [13, 23, 31, 46, 49, 51, 60], "repositori": [6, 20, 23, 25], "repres": [2, 3, 6, 13, 18, 19, 21, 24, 29, 31, 34, 40, 41, 42, 43, 46, 47, 48, 49, 53, 58, 65, 68, 72], "represent": [13, 20, 22, 26, 33, 38, 40, 43, 48, 52, 53, 54, 64, 67], "repro": 23, "reproduc": 23, "request": [7, 8, 23, 26], "requir": [3, 12, 19, 21, 23, 24, 25, 29, 30, 40, 41, 42, 43, 46, 47, 51, 56, 57, 61, 63, 67, 71, 72, 76, 78], "rerun": 6, "resampl": 8, "research": [7, 26], "reset_default_graph": [7, 72], "reshap": [6, 8, 20, 34, 69], "reshape_1": 69, "reshapefrequ": 21, "resiz": [1, 5, 7, 8, 24, 26, 34, 72], "resize_to": 34, "resnet": [13, 31], "resnet34": [46, 49], "resnet50": [41, 44, 46, 49, 51, 54, 56, 59], "resolut": 21, "resolv": [3, 23], "resourc": 30, "respect": [2, 8, 49, 51, 52, 54, 57, 59, 60, 71], "respond": 23, "respons": [23, 64, 71], "rest": [10, 21], "restore_al": 69, "restore_op_nam": 69, "restrict": [7, 8, 34, 71], "result": [2, 5, 6, 7, 8, 13, 18, 23, 24, 30, 31, 34, 35, 36, 37, 41, 43, 44, 53, 57, 60, 67, 69, 71, 75, 78], "ret": 65, "ret_shap": 19, "retain": [46, 56, 60], "retriev": [7, 26, 69, 78], "return": [3, 5, 6, 8, 10, 19, 21, 24, 29, 30, 31, 33, 34, 35, 36, 37, 42, 57, 61, 63, 65, 67, 70, 72, 75], "return_dict": 11, "reus": [31, 36, 67], "revert": 75, "review": 23, "rewritten": 24, "rfind": 7, "rgb": [24, 34], "rgba": 5, "right": [2, 5, 7, 8, 18, 25, 26, 43, 69, 72, 77, 78], "rigor": 31, "rmse": 30, "roughli": [13, 31, 37], "round": [42, 56, 57, 58, 63], "row": 42, "rtn": [56, 57], "rtol": [69, 70], "rule": 65, "run": [3, 5, 
6, 7, 8, 20, 21, 22, 24, 25, 28, 30, 31, 33, 34, 35, 36, 37, 40, 41, 42, 43, 46, 47, 49, 54, 59, 61, 64, 67, 69, 70, 71, 72], "runtim": [2, 20, 21, 22, 24, 40, 41, 44, 49, 53, 58, 59, 60, 62, 63, 65, 67, 71], "safest": 65, "sai": [23, 30, 60], "same": [2, 3, 5, 6, 7, 8, 10, 11, 23, 24, 26, 28, 34, 35, 37, 40, 41, 42, 46, 47, 48, 49, 51, 57, 67, 71, 72], "sampl": [6, 15, 23, 24, 26, 30, 31, 36, 40, 41, 42, 46, 51, 56, 60, 61, 63, 71, 72, 78], "sample_data": [57, 63], "sandler": 26, "sanit": 21, "save": [2, 5, 7, 8, 12, 13, 15, 18, 20, 21, 22, 27, 28, 30, 31, 34, 36, 37, 40, 41, 43, 53, 54, 57, 65, 66, 67, 68, 69, 70, 71, 72, 75, 76, 77, 78, 79], "save_multifunct": 37, "save_spec": [29, 77], "savedmodel": [18, 28, 68, 70], "saver": 69, "scalabl": 2, "scalar": [33, 48], "scale": [7, 8, 20, 24, 26, 40, 47, 56, 57, 58, 59, 62, 63, 65, 70], "scale_1": 40, "scale_2": 40, "scaled_dot_product_attent": [31, 67], "scaler_spec": 30, "scenario": [3, 30, 34, 37, 41, 73], "scene": 33, "schedul": [52, 60], "schema": 40, "scheme": [40, 43, 58, 65, 71], "scikit": [0, 18, 29, 64, 73], "score": [5, 8, 42, 75, 76, 77], "score_valu": 8, "scorer": 6, "scorer_path": 6, "scratch": [31, 33, 36, 70], "script": [6, 13, 23, 25, 28, 36, 47], "scripted_model": [10, 35], "scripter": 36, "scroll": 25, "sdxl": 61, "seamlessli": [2, 40], "search": 0, "seat": 72, "seattl": 29, "sec": 34, "second": [3, 6, 34, 40, 42, 46, 63, 67, 77], "section": [1, 4, 8, 16, 20, 21, 33, 35, 36, 39, 40, 41, 43, 52, 56, 57, 60, 61, 62, 63, 65, 67, 71, 72, 73], "see": [0, 1, 2, 3, 5, 7, 8, 10, 11, 13, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 42, 43, 46, 47, 49, 51, 52, 57, 59, 60, 61, 63, 64, 65, 66, 68, 69, 70, 72, 73, 74, 75, 76, 78, 79], "seed": [42, 60, 61], "seen": 59, "seg_imag": 5, "segment": [2, 6, 12, 13, 18, 30], "segment_id": 15, "segmentationmodel_no_metadata": 78, "segmentationmodel_no_metadata_from_export": 5, "segmentationmodel_no_metadata_from_trac": 5, "segmentationmodel_with_metadata": [5, 30, 78], "select": [5, 8, 18, 26, 32, 35, 49, 54, 57, 59, 71], "selector": 65, "self": [5, 10, 19, 21, 23, 24, 31, 35, 36, 37, 65, 67, 70, 75], "selu": 3, "semant": 5, "send": 23, "sens": 35, "sensit": [30, 42, 49, 54, 59, 60, 62, 63, 65, 71], "sentenc": [11, 15], "sentence_2": 10, "sentence_frag": 10, "separ": [2, 10, 16, 26, 35, 37], "seq2seq": 6, "seq_len": 67, "seqlen": 42, "sequenc": [2, 6, 10, 11, 33, 37, 42, 67], "sequence_length": 11, "sequence_output": 15, "sequenti": [30, 56, 61, 76], "seri": [2, 6, 33], "serial": [2, 7], "serializetostr": 30, "serv": 42, "sess": [7, 69, 72], "session": [7, 26, 37, 64, 67, 69, 72], "session_config": 72, "set": [2, 7, 15, 18, 19, 20, 29, 30, 33, 34, 41, 42, 46, 47, 48, 51, 52, 53, 56, 57, 60, 61, 65, 67, 68, 69, 71, 72, 77, 78], "set_categorical_cross_entropy_loss": 76, "set_epoch": 76, "set_glob": 60, "set_index_typ": 75, "set_multiarray_ndshape_rang": 21, "set_number_of_neighbors_with_bound": 75, "set_op_nam": 30, "set_op_typ": 30, "set_sgd_optim": 76, "set_titl": 72, "setter": 7, "setup": [60, 76], "sever": [2, 24, 31, 34, 35, 36, 39, 42, 43, 46, 47, 49, 51], "sgd": [41, 76], "sgdoptim": 76, "sgdparam": 76, "sh": 25, "shader": 2, "shape": [1, 2, 4, 5, 6, 7, 8, 10, 13, 15, 19, 24, 26, 28, 31, 33, 34, 35, 36, 37, 40, 41, 42, 48, 57, 67, 69, 70, 72, 73, 76, 77], "share": [23, 37, 42, 48, 57, 58], "sharp": 61, "shave": 43, "sheep": [5, 78], "shell": 25, "shift_label": 42, "shift_logit": 42, "ship": 43, "short": [2, 6, 19, 26], "short_descript": 
[26, 29], "shortdescript": [75, 76, 77], "shot": [51, 52], "should": [7, 8, 19, 20, 21, 23, 25, 30, 34, 35, 36, 40, 42, 43, 52, 54, 56, 65, 77], "shouldn": 65, "show": [1, 3, 5, 7, 8, 11, 19, 21, 24, 25, 26, 29, 30, 33, 34, 37, 47, 48, 52, 67, 69, 72, 78], "shown": [2, 3, 8, 13, 16, 17, 20, 21, 24, 26, 28, 30, 31, 33, 34, 36, 37, 40, 41, 48, 61, 67, 71, 75], "shutil": 34, "side": [2, 5, 26, 78], "sidestep": 5, "signal": 72, "signal_energi": 72, "signific": [42, 46, 63], "significantli": [2, 71], "silu": 31, "similar": [8, 19, 20, 23, 26, 40, 42, 47, 48, 61, 67, 71], "similarli": [40, 49, 63, 72], "simpl": [10, 18, 24, 31, 36, 52, 65, 69, 76, 77], "simpleattent": 67, "simpleattentionwithkeyvaluecach": 67, "simplefilt": 5, "simplenet": 36, "simpli": [20, 31, 37, 47, 52, 63, 76], "simplic": 6, "simplifi": [2, 22, 58, 67], "simul": [3, 46, 56, 57, 60], "sinc": [2, 6, 7, 11, 16, 19, 24, 28, 31, 32, 34, 35, 37, 42, 46, 49, 51, 57, 59, 60, 61, 63, 65, 68, 69, 70, 71, 72], "singl": [2, 21, 22, 30, 33, 35, 37, 43, 48, 58, 63, 69, 71, 72, 78], "sink": 2, "sink_op": 69, "situat": 30, "size": [1, 5, 6, 8, 11, 18, 19, 21, 24, 26, 29, 30, 31, 34, 36, 37, 40, 41, 42, 43, 47, 49, 51, 52, 53, 54, 59, 61, 63, 65, 66, 72, 75], "sketch": [18, 77], "ski": 61, "skip": [16, 25, 30, 35, 37, 52, 54, 60, 61, 65, 72], "skip_layer_typ": 65, "skip_special_token": 11, "sklearn": [29, 66], "skm": 47, "skmpalett": [42, 46, 47, 62, 63], "skmpalettizerconfig": [42, 47, 63], "sky": 61, "slice": 6, "slight": [43, 61], "slightli": [57, 60, 72], "slim": [7, 26], "slow": 59, "slowdown": 59, "slower": [13, 21, 31], "small": [3, 11, 35, 36, 42, 43, 46, 49, 51, 54, 63], "smaller": [30, 42, 43, 47, 60, 63], "smallest": [11, 53], "smith": [11, 29], "snapshot": 67, "snippet": [18, 19, 21, 24, 28, 29, 30, 31, 32, 34, 36, 41, 42, 60, 61, 69, 72], "snr": 72, "so": [2, 6, 8, 13, 15, 16, 19, 20, 21, 23, 24, 26, 28, 30, 31, 34, 35, 40, 53, 54, 57, 60, 61, 65, 67, 71, 72, 76], "soc": 41, "sofa": [5, 78], "soft": 46, "softmax": [7, 70, 76], "softwar": [23, 25, 71], "sole": 63, "solv": 51, "some": [2, 7, 10, 20, 21, 28, 35, 40, 41, 42, 43, 46, 47, 57, 61, 62, 65, 67, 69, 71, 78], "someth": [8, 63], "sometim": 59, "somewher": 69, "sonoma": 34, "sophist": 2, "sort": [19, 51], "sourc": [2, 3, 7, 9, 15, 20, 21, 26, 28, 32, 33, 34, 39, 40, 71, 73, 75], "source_model": [16, 17, 20, 24, 32, 71], "source_torch_model": 32, "space": [20, 34, 39, 40, 61, 64, 65, 77], "spars": [41, 42, 43, 46, 52, 53, 54, 60, 61, 62], "sparse_gpt_config": 52, "sparse_model": 61, "sparse_palettized_model": 61, "sparse_weight": 30, "sparsegpt": 62, "sparsif": 52, "sparsifi": [53, 62], "sparsiti": [30, 51, 52, 53, 54, 60, 61, 63], "spatial": 72, "spec": [16, 18, 20, 21, 30, 68, 72, 75, 77], "spec_data": 30, "spec_valu": 75, "special": [1, 2, 10, 30, 34, 37, 40, 71], "specializationstrategi": 34, "specif": [2, 4, 7, 19, 21, 23, 24, 30, 34, 36, 37, 38, 42, 43, 44, 46, 47, 61, 63, 67, 71, 72, 77], "specifi": [2, 7, 8, 10, 17, 19, 20, 21, 22, 23, 24, 26, 27, 28, 30, 31, 32, 33, 37, 38, 41, 42, 47, 48, 52, 57, 60, 70, 71], "specificationvers": 77, "speech": [6, 18, 69], "speed": [20, 34, 41, 42, 49, 54, 67], "speedup": 60, "spent": 42, "splitlin": [8, 26], "spot": [60, 72], "sqrt": 70, "squar": [29, 46, 67, 76], "squeez": [8, 72], "squeezellm": [42, 46], "src_function_nam": 37, "stabilityai": 61, "stabl": [13, 25, 30, 34, 61], "stable_diffusion_version_stabilityai_st": 61, "stablediffus": 44, "stack": [2, 6, 20, 56], "stackoverflow": [7, 72], "stage": [6, 23, 37, 57], "stai": 
60, "stand": 78, "standard": [5, 6, 8, 23, 24, 43, 46, 49, 51], "star": 77, "stark": 42, "start": [0, 1, 6, 8, 13, 14, 24, 31, 32, 33, 34, 37, 38, 40, 41, 42, 46, 47, 48, 49, 51, 52, 53, 57, 60, 61, 63, 64, 67, 77, 78], "state": [4, 6, 13, 31, 39, 42, 49, 54, 59, 60], "state1": 67, "state2": 67, "state_dict": 67, "stateless": 67, "statement": [3, 5, 15], "statetensorspec": 67, "statetyp": [31, 67], "static": [19, 20, 32, 42], "statist": [49, 54, 57, 59, 60], "statu": [13, 23, 31], "std": [5, 8, 24], "stddev": 69, "steel": 61, "step": [1, 3, 5, 6, 7, 8, 12, 13, 15, 21, 23, 25, 26, 28, 29, 31, 34, 36, 37, 39, 41, 46, 47, 52, 57, 60, 61, 63, 64, 67, 69, 70, 72, 77, 78], "still": [2, 20, 21, 23, 26, 28, 30, 31, 34, 35, 40, 46, 49, 51, 52, 60, 61, 68], "stochast": 76, "storag": [7, 8, 20, 26, 39, 40, 53, 54, 64, 65], "store": [2, 15, 24, 34, 40, 42, 48, 53, 54, 58, 67, 76], "stori": 61, "str": 7, "straight": 57, "straightforward": 2, "strategi": [43, 67, 76], "strict": 67, "strictli": 64, "string": [3, 24, 26, 30, 54, 76, 78], "stringinputtyp": 19, "strip": 6, "stroke": 77, "strongli": [2, 71], "structur": [30, 35, 51, 52, 53, 54], "studi": 51, "style": [21, 37, 61, 71, 72], "styliz": [24, 72], "sub": 42, "subgraph": 6, "submit": [18, 23], "submodel": 37, "submodul": 46, "subplot": 72, "subsect": 40, "subsequ": [2, 21, 34, 67], "subset": [42, 46, 51], "subspac": 62, "substanti": 46, "subwai": 10, "succe": 40, "suddenli": 11, "suffici": [2, 16, 46, 51, 52, 56, 72], "suggest": 56, "suit": 43, "suitabl": [11, 72], "sum": [67, 72], "summar": [16, 32, 47, 60, 61, 68], "summat": 3, "sunni": 61, "super": [5, 10, 19, 21, 31, 35, 36, 37, 65, 67, 70], "support": [3, 10, 13, 16, 20, 29, 31, 33, 34, 35, 40, 43, 46, 47, 48, 51, 58, 61, 62, 63, 65, 67, 71], "supportvectorregressor": 29, "sure": [3, 20, 23, 26, 34, 52, 67], "svm": 2, "svm_paramet": 27, "svm_problem": 27, "svm_train": 27, "svmutil": 27, "sweet": 60, "swift": [2, 18, 28, 34, 71], "sy": 7, "symbol": [19, 57], "symmetr": [20, 40, 41, 56, 57, 60, 65], "system": [6, 7, 10, 16, 24, 25, 26, 34, 71, 72], "t": [3, 6, 13, 17, 19, 21, 23, 28, 30, 32, 33, 34, 35, 36, 42, 61, 65, 67, 78], "t5": 3, "t_start": 67, "tab": [1, 5, 15, 21, 24, 25, 26, 37, 49, 54, 59, 75, 78], "tabl": [16, 40, 41, 42, 43, 46, 47, 48, 49, 51, 59, 60, 61, 62, 63, 65, 68, 78], "tailor": 33, "take": [5, 6, 8, 10, 11, 13, 18, 20, 26, 28, 32, 33, 34, 40, 41, 42, 43, 46, 47, 52, 53, 56, 60, 61, 62, 63, 67, 69, 71, 72, 77], "taken": [34, 42, 60, 61], "talk": 46, "tall": 61, "tanh": 70, "tar": 7, "tarfil": 7, "target": [16, 17, 19, 24, 26, 28, 30, 32, 37, 38, 40, 41, 47, 48, 51, 53, 57, 60, 63, 67, 76], "target_function_nam": 37, "target_modul": 37, "target_spars": [41, 52, 60, 61], "task": [6, 25, 37, 42, 49, 59, 63], "team": 23, "techniqu": [6, 20, 33, 39, 41, 43, 44, 53, 54, 56, 60, 61, 63], "technologi": 64, "tell": [23, 76], "temb": 61, "tempfil": 69, "templat": 23, "temporari": 34, "tend": 42, "tensor": [2, 3, 5, 6, 7, 10, 11, 16, 19, 24, 28, 31, 32, 33, 35, 36, 40, 41, 42, 46, 47, 48, 49, 52, 53, 57, 58, 60, 61, 62, 65, 67, 69, 72], "tensorflow": [0, 2, 3, 8, 16, 20, 21, 23, 25, 26, 32, 33, 34, 38, 64, 73, 78], "tensorflow1": [7, 72], "tensorflow_hub": [15, 70], "tensorinputtyp": 19, "tensorspec": [33, 67, 70], "tensortyp": [5, 6, 8, 10, 13, 21, 24, 28, 31, 32, 34, 35, 37, 41, 57, 67, 69], "tenth": 47, "term": [6, 53, 63, 65, 76], "termin": [25, 35], "test": [5, 10, 11, 13, 15, 21, 23, 26, 30, 31, 33, 34, 41, 43, 65, 67, 69, 70, 72, 78], "testconvmodul": 21, "text": [2, 6, 8, 10, 
11, 15, 34, 37, 61], "textencod": 61, "tf": [3, 6, 7, 18, 20, 23, 24, 26, 28, 32, 68, 69, 70, 72, 77], "tf1": [20, 23, 33], "tf2": [20, 23], "tf_alia": 19, "tf_frozen": 69, "tf_graph": 69, "tf_hub": [15, 70], "tf_input_nam": 7, "tf_keras_model": 70, "tf_model": [6, 15, 19, 20, 24, 28, 69, 70], "tf_model_path": 7, "tf_op_registri": [3, 19], "tf_out": [7, 69, 70], "tf_out_tensor": 72, "tf_output_nam": 7, "tfcoreml": 39, "tfdistilbertformaskedlm": 15, "tfhub": [15, 70], "tft5model": 3, "than": [2, 13, 17, 19, 21, 24, 26, 28, 30, 31, 33, 34, 35, 37, 42, 46, 47, 51, 52, 53, 57, 59, 60, 61, 63, 65, 67, 68], "thei": [2, 19, 23, 24, 40, 47, 48, 57, 59, 63, 71], "them": [2, 5, 6, 7, 11, 13, 18, 22, 23, 24, 25, 26, 30, 31, 33, 34, 37, 40, 43, 46, 57, 63, 67, 68, 71, 76, 78], "therebi": [20, 34, 40, 48, 53, 54], "therefor": [2, 8, 10, 19, 24, 26, 28, 34, 51, 59, 72], "thi": [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 15, 16, 19, 20, 21, 23, 24, 25, 26, 28, 29, 30, 31, 33, 34, 35, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78], "third": [33, 40, 64], "those": [6, 10, 30, 37, 54, 57, 63], "though": [47, 63], "three": [2, 6, 8, 24, 29, 37, 43, 46, 51, 53, 58, 63, 67, 72], "threshold": [30, 40, 51, 52], "through": [2, 5, 8, 13, 23, 31, 35, 36, 39, 52, 56, 57, 58, 60, 61, 64], "throughput": 59, "thu": [35, 42, 46], "thumb": 23, "ti": [2, 71], "tick": 34, "time": [6, 7, 10, 11, 13, 18, 20, 31, 35, 41, 42, 43, 46, 47, 49, 51, 54, 59, 60, 63, 67, 72], "timestep": 61, "tini": 77, "tinydrawingclassifi": 77, "tinydrawingembed": 77, "tip": 37, "titl": 26, "tmp": 6, "to_typ": 30, "togeth": 48, "toi": 13, "token": [10, 15, 42, 67], "token_id": 67, "token_predictor": 10, "tokenized_prompt": 11, "tokenized_train_data": 42, "tolist": 11, "too": [47, 61], "tool": [5, 6, 7, 8, 11, 12, 13, 14, 15, 16, 17, 18, 21, 22, 23, 24, 26, 29, 31, 32, 33, 37, 40, 42, 51, 56, 58, 61, 65, 67, 68, 69, 70, 75, 76, 77], "top": [1, 6, 8, 19, 26, 33, 35, 49, 59, 60, 61], "top_3_indic": 8, "top_3_indices_coreml": 8, "topic": [1, 33, 41, 75], "topk": 19, "topkv2": 19, "torch": [3, 5, 10, 11, 13, 28, 31, 34, 35, 36, 37, 42, 46, 49, 51, 54, 56, 57, 59, 60, 61, 62, 63, 67], "torch2coreml": 61, "torch_dtyp": [11, 42], "torch_kvcache_output": 67, "torch_model": [5, 8, 11, 13, 24, 28, 34, 47, 61, 67], "torch_model_kvcach": 67, "torch_op_registri": 3, "torch_out": [8, 10], "torch_out_np": 8, "torch_output": 67, "torch_predict": 5, "torchscript": [8, 10, 24, 28, 35, 36, 68], "torchvis": [5, 12, 13, 18, 24, 28, 34, 41, 49, 59, 60], "torchvision_mobilenet_v2": 28, "total": [13, 30, 31], "totensor": 5, "tour": 43, "toymodel": 67, "toymodelwithkeyvaluecach": 67, "trace": [3, 12, 13, 18, 21, 28, 32, 34, 37, 40, 41, 47, 57, 63, 67], "traceabl": 57, "traceback": 75, "traced_model": [5, 8, 13, 21, 24, 28, 34, 41, 47, 57, 67], "traced_model_kvcach": 67, "traced_palettized_model": 63, "traced_token_predictor": 10, "tracer": [5, 10, 13, 24, 36], "track": 67, "trade": [42, 43, 60, 63], "tradeoff": 46, "train": [1, 2, 5, 6, 7, 8, 10, 12, 13, 18, 24, 26, 28, 29, 30, 31, 34, 35, 36, 41, 42, 43, 46, 49, 54, 58, 59, 60, 61, 64, 66, 71, 72, 76, 77, 78, 79], "train_data": 42, "train_dataload": 52, "train_load": 60, "train_step": [47, 52, 60], "trainabl": [15, 70], "training_util": 47, "traininginput": [75, 76, 77], "transcript": 6, "transfer": [21, 72], "transform": [2, 5, 6, 8, 10, 11, 14, 18, 22, 24, 31, 42, 43, 46, 51, 56, 62, 67, 71, 78], "transit": 39, "translat": [2, 3, 13, 
19, 20, 31, 33], "transpar": [6, 61], "transpos": [3, 8, 33, 34], "transpose_i": 3, "transpose_perm_0": 33, "transpose_x": 3, "tree": [2, 24, 26, 61, 64], "tri": 28, "triag": 23, "trial": [46, 51], "triviaqa": 42, "troubleshoot": 20, "true": [3, 5, 6, 7, 8, 10, 11, 13, 19, 30, 34, 35, 41, 47, 52, 57, 60, 65, 69, 70, 75, 76, 77], "truncated_norm": 69, "trust_remote_cod": 11, "try": [5, 13, 20, 23, 31, 43, 60, 61, 63, 64, 65, 75], "tune": [18, 37, 42, 43, 49, 51, 52, 54, 62, 64, 76], "tupl": 5, "turn": [42, 57, 60, 71], "turquois": 23, "tutori": [13, 42, 61, 64], "tuxedo": 61, "tvm": [26, 34], "tvo": [13, 16, 39, 68], "tvormonitor": [5, 78], "tvos15": [16, 17, 24, 38, 68], "tvos16": [48, 53], "twice": 35, "two": [6, 30, 34, 35, 37, 39, 40, 42, 47, 54, 57, 60, 61, 63, 68, 72, 76], "txt": [6, 7, 8, 26], "type": [0, 1, 2, 4, 5, 6, 7, 8, 9, 13, 15, 17, 19, 20, 23, 26, 28, 29, 37, 39, 40, 42, 44, 46, 47, 52, 53, 57, 58, 62, 65, 67, 69, 70, 73, 74, 76, 77], "type_domain": 19, "type_infer": 19, "typic": [8, 23, 24, 28, 37, 43, 46, 47, 49, 51, 53, 54, 56, 57, 63, 67, 69, 70, 71], "typo": 23, "u": [5, 7, 8, 10, 13, 23, 25, 26, 42, 60, 72, 76], "ui": [26, 67], "uint8": [5, 34, 41, 47], "unabl": 61, "uncompress": [41, 42, 46, 49, 51, 63], "uncompressed_model_path": [47, 63], "uncompressed_torch_model": 63, "under": [6, 23, 26, 62, 63, 68, 78], "undergo": [2, 62], "underli": 34, "understand": [2, 8, 13, 23, 26, 67, 71], "undetermin": 21, "unet": 61, "unet2dconditionmodel": 61, "unet2dconditionmodelxl": 61, "unetmidblock2dcrossattn": 61, "unexpect": [23, 30], "unif": 2, "unifi": [1, 2, 6, 8, 13, 14, 16, 20, 24, 26, 28, 32, 33, 38, 39, 64, 68, 70, 73], "uniform": [47, 48], "uniqu": [30, 37, 40, 47], "unique_valu": 30, "unit": [2, 7, 8, 20, 43, 49, 54, 59, 61, 65, 70, 71], "unknown": [69, 77], "unless": [20, 49, 51, 54, 59], "unlik": [32, 58, 71], "unnecessari": 6, "unpreced": 34, "unsign": 40, "unsqueez": [5, 11], "unstructur": [51, 53, 54], "unsupport": [2, 3, 19, 33], "until": [10, 11], "untyp": 28, "unus": [22, 60], "unzip": 7, "up": [5, 20, 21, 23, 26, 30, 32, 34, 37, 40, 41, 42, 43, 46, 51, 52, 54, 60, 61, 63, 65, 67, 78], "up_block": 61, "upblock2d": 61, "updat": [0, 2, 6, 20, 25, 30, 37, 39, 40, 46, 47, 57, 61, 62, 67, 75], "updatableknn": 75, "updatablemnistdigitclassifi": 76, "update_step": [52, 60], "updated_model": 30, "upgrad": [20, 39], "upon": [11, 52, 56], "upper": 21, "upper_bound": [21, 67], "url": [7, 76], "urllib": [5, 7, 8, 26], "urlopen": [8, 26], "urlretriev": 7, "us": [0, 1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 15, 16, 17, 18, 22, 23, 25, 27, 28, 31, 33, 36, 37, 38, 39, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 61, 62, 63, 64, 65, 66, 68, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79], "usag": [30, 34, 52], "use_cach": [11, 42], "usecpuonli": [7, 34, 69], "user": [2, 3, 18, 21, 25, 27, 31, 39, 40, 43, 47, 57, 64, 71, 77], "user_defined_metadata": [5, 15, 26, 78], "userwarn": 7, "usual": [3, 34, 47, 54, 56, 67, 71], "utf": 8, "utf8": 26, "util": [0, 1, 6, 19, 20, 21, 23, 28, 29, 34, 37, 39, 41, 43, 47, 52, 61, 65, 67, 76, 77], "v": [2, 31, 42, 67], "v0": 5, "v1": [7, 69], "v2": 69, "v_cach": 67, "vae": 61, "vaedecod": 61, "val": [19, 30, 33], "valid": [6, 31, 36, 46, 51, 72, 75], "valu": [5, 6, 7, 8, 11, 16, 19, 24, 26, 28, 30, 33, 34, 35, 37, 40, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 62, 65, 67, 68, 69, 70, 71, 76], "valueerror": [19, 75], "values_vector": 8, "var": 2, "vari": [34, 42, 43, 44, 49, 54, 59, 60, 61, 63, 65, 71, 72], "variabl": [2, 32, 36, 
69, 71], "variant": [6, 11, 15, 37, 61], "variat": 37, "varieti": [3, 29, 51, 69], "variou": [19, 32, 41, 42, 43, 56, 57, 60, 61], "vase": 8, "ve": 35, "vector": [2, 37, 40, 53, 64, 75, 77], "vehicl": 26, "venv": 25, "veri": [8, 24, 34, 35, 42, 43, 46, 61, 63], "verifi": [3, 24, 26, 34, 64, 67, 69, 72, 75], "versa": 30, "version": [2, 3, 7, 8, 10, 11, 12, 13, 15, 16, 17, 23, 24, 26, 28, 29, 31, 32, 34, 35, 42, 43, 44, 46, 47, 49, 54, 59, 60, 61, 62, 63, 67, 70, 71, 72, 77], "version_info": 7, "via": [13, 30, 31, 37, 40, 41, 52, 57, 62, 63], "vibrant": 34, "vice": 30, "video": [37, 64], "view": [6, 24, 42, 43, 61], "viewer": 39, "vision": [5, 8, 24, 26, 64], "visit": 43, "visual": [2, 34, 39], "visualize_spec": 39, "vit": [13, 31], "voc": 5, "vocab_s": 67, "w": [24, 60, 61, 69, 70], "w1": 31, "w16a16": 59, "w2": 31, "w3": 31, "w4": 41, "w8a8": [43, 57, 59, 60, 62], "w_quantiz": 58, "w_unquant": 58, "wa": [2, 8, 10, 11, 12, 17, 26, 40, 42, 52, 60, 67, 68, 72, 76, 77], "wai": [2, 13, 23, 24, 25, 26, 28, 30, 31, 34, 36, 40, 41, 42, 43, 46, 52, 60, 63, 64, 67, 70, 71], "walk": [23, 64], "want": [2, 5, 10, 19, 20, 24, 30, 35, 40, 41, 43, 57, 63, 67, 71], "warm": 61, "warn": [5, 7, 10, 21, 35], "watcho": [13, 16, 26, 39, 68], "watchos8": [16, 17, 24, 38, 68], "watchos9": [48, 53], "wav": 6, "wave": 72, "we": [5, 7, 11, 13, 20, 23, 25, 28, 30, 31, 33, 36, 40, 41, 42, 46, 47, 49, 51, 52, 56, 57, 60, 61, 67, 69, 75, 76], "web": 10, "weight": [0, 2, 6, 16, 18, 20, 26, 28, 37, 39, 41, 42, 43, 46, 47, 48, 49, 51, 52, 53, 54, 58, 59, 60, 61, 62, 63, 69, 70, 76], "weight_dtyp": [42, 52, 57], "weight_file_path": 30, "weight_metadata": 30, "weight_metadata_dict": 30, "weight_nam": 30, "weight_quant_model_config": 63, "weight_quant_op_config": 63, "weight_scal": 40, "weight_threshold": [30, 47, 52, 57], "weighting_schem": [75, 77], "weights_data": 30, "weights_dir": 30, "welcom": 23, "well": [5, 24, 26, 37, 40, 42, 43, 46, 47, 48, 51, 52, 56, 57, 58, 59, 60, 71, 72, 78], "were": [5, 35, 39, 40, 42, 46, 47, 49, 51, 54, 59, 61], "what": [0, 1, 15, 20, 23, 26, 30, 33, 35, 37, 39, 43, 44, 48, 53, 63, 69, 75, 76], "wheel": [25, 39], "when": [2, 7, 8, 10, 13, 19, 20, 21, 23, 24, 26, 28, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 46, 47, 48, 49, 53, 54, 57, 58, 59, 63, 65, 67, 71, 72], "whenev": [3, 19, 25], "where": [30, 31, 37, 40, 41, 42, 43, 46, 47, 48, 51, 53, 54, 57, 60, 61, 67], "wherea": [42, 60], "whether": [16, 28, 40, 43, 71], "which": [1, 2, 3, 5, 6, 7, 8, 10, 11, 19, 20, 21, 22, 23, 24, 25, 26, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 51, 52, 53, 54, 56, 57, 58, 59, 60, 61, 63, 64, 67, 68, 69, 71, 72, 76, 77], "whichoneof": [8, 16, 29], "while": [2, 3, 6, 10, 18, 19, 21, 24, 28, 34, 37, 38, 40, 42, 43, 46, 47, 49, 52, 53, 56, 60, 61, 63, 64, 65, 68, 71, 72, 77], "white": 77, "whiten": 24, "whl": 25, "whole": [35, 37, 48, 53, 58, 71], "whose": [22, 34, 40, 41, 43], "why": 33, "wide": [28, 34, 61, 64, 68], "width": [6, 24, 34, 76, 77], "width_multipli": 78, "wife": 11, "wikipedia": 7, "wikitext2": 42, "wise": [40, 42], "within": [7, 10, 19, 21, 26, 37, 42, 53, 60, 61, 65, 71, 75], "without": [2, 5, 6, 12, 24, 28, 40, 47, 53, 57, 60, 63, 65, 67, 68, 70, 75], "wk": 31, "wo": 31, "won": [13, 35, 36], "word": [10, 11], "work": [2, 5, 8, 11, 13, 18, 21, 24, 30, 31, 34, 43, 46, 51, 56, 60, 63, 65, 67, 69, 70, 71, 78], "workaround": [20, 37], "workflow": [0, 9, 12, 14, 18, 23, 30, 33, 34, 40, 43, 47, 52, 56, 59, 60, 62], "world": [34, 64], "worri": 36, "would": 
[7, 21, 30, 33, 34, 35, 37, 42, 47, 52, 53, 57, 60, 69, 71, 72], "wq": 31, "wrap": [1, 5, 34], "wrapped_typ": 67, "wrappeddeeplabv3resnet101": 5, "write": [3, 20, 33, 64, 65, 67], "write_graph": 69, "written": [57, 76], "wv": 31, "wwdc": [37, 43, 67], "www": 70, "x": [3, 5, 7, 8, 10, 19, 21, 23, 24, 27, 30, 31, 33, 34, 35, 36, 37, 67, 69, 70, 72, 76, 77], "x_1": 67, "x_2": 67, "x_blue_channel": 24, "x_green_channel": 24, "x_red_channel": 24, "x_shape": 19, "x_type": 19, "xception": [28, 70], "xcode": [0, 2, 8, 15, 20, 21, 28, 29, 34, 37, 49, 54, 59, 67, 71], "xcrun": 2, "xgboost": [0, 18, 64, 73], "xl": 61, "y": [3, 27, 30, 31, 35, 67, 69, 70, 72], "y_1": [37, 67], "y_2": [37, 67], "y_blue_channel": 24, "y_green_channel": 24, "y_red_channel": 24, "yaml": [47, 52, 57], "ye": 40, "yet": [2, 5], "yield": [41, 61], "you": [1, 2, 3, 5, 6, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 47, 49, 51, 56, 57, 59, 60, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 78, 79], "your": [1, 2, 3, 6, 7, 8, 13, 17, 18, 19, 20, 21, 23, 24, 25, 26, 30, 32, 33, 34, 35, 36, 37, 39, 40, 43, 44, 46, 51, 57, 59, 63, 64, 65, 67, 70, 71, 72, 77], "z": 31, "zero": [6, 11, 15, 34, 35, 40, 41, 43, 51, 52, 53, 54, 56, 57, 58, 62, 67], "zero_grad": 41, "zero_point": [40, 58], "zero_point_1": 40, "zero_point_2": 40, "zhmoginov": 26, "zhu": 26, "zsh": 25}, "titles": ["Core ML Tools", "Classifiers", "Comparing ML Programs and Neural Networks", "Composite Operators", "Conversion Options", "Converting a PyTorch Segmentation Model", "Converting a TensorFlow 1 DeepSpeech Model", "Converting a TensorFlow 1 Image Classifier", "Converting a torchvision Model from PyTorch", "Converting Deep Learning Models", "Converting a Natural Language Processing Model", "Converting an Open Efficient Language Model", "Converting from PyTorch", "PyTorch Conversion Workflow", "Converting from TensorFlow", "Converting TensorFlow 2 BERT Transformer Models", "Convert Models to ML Programs", "Convert Models to Neural Networks", "Examples", "Custom Operators", "Core ML Tools FAQs", "Flexible Input Shapes", "Graph Passes", "Contributing", "Image Input and Output", "Installing Core ML Tools", "Getting Started", "LibSVM", "Load and Convert Model Workflow", "MLModel Overview", "MLModel Utilities", "Model Exporting", "Model Input and Output Types", "Model Intermediate Language", "Model Prediction", "Model Scripting", "Model Tracing", "Multifunction Models", "New Conversion Options", "New Features", "Conversion", "Combining Compression Types", "Optimizing OPT Model", "Overview", "Examples", "Palettization", "Palettization Algorithms", "API Overview", "Palettization Overview", "Performance", "Pruning", "Pruning Algorithms", "API Overview", "Overview", "Performance", "Linear Quantization", "Quantization Algorithms", "API Overview", "Quantization Overview", "Performance", "Optimizing ResNet50 Model", "Optimizing StableDiffusion Model", "What\u2019s New", "Optimization Workflow", "What Is Core ML Tools?", "Compressing Neural Network Weights", "Scikit-learn", "Stateful Models", "Source and Conversion Formats", "TensorFlow 1 Workflow", "TensorFlow 2 Workflow", "Typed Execution", "Typed Execution Workflow Example", "Core ML Tools API Overview", "Updatable Models", "Nearest Neighbor Classifier", "Neural Network Classifier", "Pipeline Classifier", "Xcode Model Preview Types", "XGBoost"], "titleterms": {"": [5, 20, 62], "1": [6, 7, 18, 19, 28, 40, 60, 65, 68, 69, 
72], "13": 16, "16": [16, 24, 65], "2": [10, 15, 18, 19, 28, 60, 68, 70], "3": [19, 39], "32": 72, "4": [19, 20, 39], "5": 20, "6": 20, "7": [20, 62], "8": [39, 62, 65], "A": 67, "For": [13, 18, 25, 39, 65], "In": 30, "Into": 6, "Not": 35, "One": 6, "The": [1, 10], "With": [20, 24, 29, 63, 70], "about": [33, 34], "accumul": 67, "accur": 35, "accuraci": [51, 56], "activ": [40, 56, 57, 58, 60], "adapt": 37, "add": 24, "addit": 64, "advantag": 21, "algorithm": [42, 46, 51, 56], "all": 30, "an": [6, 11, 20, 24, 38, 41, 51, 72, 77], "api": [0, 1, 21, 47, 52, 56, 57, 62, 63, 73], "appl": 43, "appli": 76, "applic": 67, "appropri": 71, "ar": 65, "arrai": [30, 34], "asr": 6, "attent": 67, "audio": 6, "avail": [16, 21, 43, 48, 53, 62], "awar": 57, "base": [42, 52, 56, 57, 63, 76], "befor": 7, "beginn": 25, "behavior": 32, "benchmark": [49, 51, 54, 59], "benefit": 2, "bert": [15, 70], "better": 20, "bisect": 30, "bit": 65, "bodi": 78, "build": 25, "cach": 67, "calibr": [42, 52, 56, 57, 63], "can": 7, "captur": 13, "chang": 30, "channel": [42, 48, 61], "choos": [20, 51, 71], "class": [8, 19], "classifi": [1, 7, 18, 30, 75, 76, 77], "code": 23, "combin": [37, 41], "compar": [2, 8, 72], "comparison": 72, "compil": [20, 34], "composit": [3, 18, 19], "compress": [40, 41, 42, 43, 60, 61, 63, 65], "comput": [20, 28, 30, 34, 38, 72], "conclus": 42, "concret": 70, "conda": 25, "contribut": 23, "control": 65, "convers": [0, 3, 4, 13, 18, 20, 28, 32, 38, 40, 68], "convert": [0, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 24, 26, 28, 30, 31, 33, 38, 39, 40, 47, 52, 57, 67, 69, 70, 72, 76], "core": [0, 2, 7, 8, 10, 11, 13, 20, 21, 25, 29, 33, 39, 47, 52, 57, 62, 64, 67, 68, 73], "coreml": 61, "coremltool": 20, "creat": [33, 67, 75, 76, 77], "custom": [18, 19, 20, 65], "data": [42, 52, 56, 57, 60, 63], "dataset": 63, "declar": 20, "decompos": 3, "deep": [9, 20], "deepspeech": 6, "default": [16, 20, 21, 32, 68], "defin": [19, 70, 76], "definit": 7, "deploy": [13, 39, 68], "deprec": [34, 39], "descript": [29, 30], "detail": 2, "develop": [19, 68], "differ": [2, 31, 56, 68], "differenti": [46, 47], "dimens": 21, "distilbert": 15, "document": 23, "doubl": 30, "download": [7, 8, 26, 28, 72], "dtype": 32, "dynam": [6, 21], "each": [21, 63], "effect": [42, 43], "effici": 11, "embed": [40, 77], "enabl": 21, "encod": 10, "engin": 20, "enumer": 21, "environ": 25, "error": 20, "evalu": [8, 13, 30], "exampl": [3, 18, 24, 26, 28, 29, 30, 34, 37, 44, 47, 67, 69, 72, 78], "execut": [18, 38, 71, 72], "exist": 3, "export": [8, 11, 21, 31, 69], "exportedprogram": 13, "faq": 20, "fast": 34, "faster": 20, "featur": [30, 39, 43, 48, 53, 68], "feed": 6, "file": [6, 68], "filter": 24, "find": 16, "fine": [56, 60, 63], "fix": 20, "flag": 34, "flexibl": [18, 20, 21], "float": [16, 24, 30, 65], "foat": 72, "follow": 28, "format": [65, 68, 70, 76], "fragment": 10, "framework": 64, "free": [42, 52, 56, 57, 60, 63], "frequenc": 21, "from": [8, 12, 14, 21, 25, 28, 31, 34, 39], "frozen": 69, "function": [3, 65, 70], "get": [8, 26, 30, 77], "gpt": 10, "gptq": 56, "granular": [48, 58], "graph": [7, 13, 22, 69], "grayscal": 24, "group": 61, "handl": 20, "high": 20, "hint": 21, "how": [6, 43, 58, 68], "hub": 15, "i": [20, 64], "imag": [5, 7, 8, 18, 20, 24, 32, 34, 69], "imagetyp": 24, "impact": [42, 56], "implement": 19, "import": [3, 10, 11, 24], "improv": 68, "index": 75, "info": [1, 40, 49, 59], "inform": 13, "initi": 20, "input": [5, 6, 7, 8, 10, 18, 20, 21, 24, 29, 30, 32, 34, 38, 69], "instal": [3, 23, 25, 72], "instruct": 23, 
"intermedi": [18, 33], "ios15": 62, "issu": 23, "its": 76, "jit": 35, "joint": [41, 61], "jointli": 40, "k": [46, 47], "kera": [20, 70], "kv": 67, "label": [8, 23], "languag": [10, 11, 18, 33], "last": 19, "latenc": [42, 60], "layer": [19, 65], "learn": [9, 20, 33, 34, 66], "librari": [10, 11, 64], "libsvm": 27, "limit": [31, 36], "linear": [18, 55], "load": [5, 7, 8, 26, 28, 29, 76], "lora": 37, "lower": 62, "lut": [48, 65], "maco": [25, 34], "macos12": 62, "magnitudeprun": 51, "mai": 35, "make": [7, 8, 26, 72, 76], "make_updat": 76, "mean": [46, 47], "memori": 30, "metadata": [5, 26, 29, 30], "method": 39, "methodologi": [46, 49, 51, 54, 59], "migrat": 39, "mil": [2, 3, 18, 19, 33, 67], "minim": 60, "minimum": [13, 68], "mix": 35, "ml": [0, 2, 7, 8, 10, 11, 13, 16, 18, 20, 21, 24, 25, 26, 29, 33, 38, 39, 47, 52, 57, 62, 64, 67, 68, 71, 72, 73, 76], "mlmodel": [0, 18, 29, 30], "mlmultiarrai": 24, "mlpackag": 41, "mobilenetv2": 8, "mode": [8, 13, 56], "model": [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 20, 21, 24, 26, 28, 30, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42, 47, 49, 52, 57, 59, 60, 61, 63, 67, 69, 70, 72, 74, 76, 77, 78], "more": [3, 13, 33, 34, 69], "multi": [21, 30, 34], "multiarrai": 24, "multifunct": 37, "name": [20, 32], "natur": 10, "nearest": [75, 77], "neighbor": [75, 77], "network": [2, 17, 20, 21, 26, 38, 65, 71, 76], "neural": [2, 17, 20, 21, 26, 38, 65, 71, 76], "new": [25, 38, 39, 62], "newer": 16, "nlp": 10, "nn": 20, "normal": 5, "note": 39, "now": 24, "number": 75, "numer": 20, "obtain": 13, "older": 39, "onli": [21, 65], "op": [3, 20], "open": [5, 11, 24, 78], "oper": [3, 18, 19, 68], "opt": 42, "optim": [0, 18, 20, 21, 42, 60, 61, 62, 63], "option": [4, 18, 24, 28, 38, 65], "origin": 13, "other": 0, "output": [8, 20, 24, 29, 30, 32, 34, 38, 72], "overview": [0, 18, 29, 33, 43, 47, 48, 52, 53, 57, 58, 73, 78], "packag": [16, 25], "palett": [40, 41, 42, 45, 46, 47, 48, 60, 61], "paramet": 72, "parti": 25, "pass": [22, 40], "per": [42, 48], "perform": [21, 43, 49, 54, 59, 68], "pick": 38, "pipelin": [40, 77], "plan": 30, "pose": 78, "post": [47, 51, 56, 63], "posttrainingpalett": 61, "pre": [3, 69, 70], "prealloc": 21, "precis": [16, 38, 58, 71, 72], "predetermin": 21, "predict": [7, 8, 18, 26, 29, 34, 67, 72], "preprocess": [6, 7, 8, 20, 24], "prerequisit": 25, "preview": [1, 18, 78], "previou": [20, 39], "print": 8, "process": 10, "produc": [1, 26, 68], "program": [2, 16, 18, 24, 26, 33, 38, 71, 72], "prompt": 11, "protobuf": 8, "protocol": 40, "provid": [21, 32], "prune": [50, 51, 52, 60, 61], "python": [25, 34], "pytorch": [3, 5, 8, 10, 11, 12, 13, 18, 20, 24, 28, 32, 39, 40, 47, 56, 57, 67, 68], "qat": 57, "quantiz": [20, 40, 41, 42, 48, 55, 56, 57, 58, 60, 65], "queri": 23, "quick": 18, "rang": 21, "recommend": 70, "reduct": 60, "refer": [0, 21], "regist": [3, 19, 67], "regressor": 30, "releas": [20, 39], "renam": 30, "represent": 2, "requir": [5, 7, 8, 10, 11, 15, 16, 26, 31, 34], "reshap": 21, "resnet50": 60, "resort": 19, "resourc": 64, "result": [42, 46, 49, 51, 54, 59, 61], "run": [10, 11, 26], "runtim": [42, 43], "sampl": 5, "save": [16, 24, 26, 29], "scalar": [24, 61], "scale": [42, 48], "scenario": 60, "scikit": 66, "script": [10, 35], "secur": 23, "segment": [5, 78], "select": 21, "sensit": [46, 47], "sentenc": 10, "sequenti": 70, "set": [5, 6, 8, 10, 11, 13, 16, 21, 24, 25, 26, 28, 32, 38, 75, 76], "shape": [18, 20, 21, 32], "silicon": 43, "simpl": 67, "size": 60, "slightli": 7, "softwar": 62, "sourc": [23, 25, 68, 72], "spars": 40, 
"sparsegpt": [51, 52, 61], "sparsiti": 41, "spec": [8, 29, 76], "specif": [29, 68], "specifi": [34, 40], "stablediffus": 61, "start": [18, 20, 26], "state": 67, "statement": 24, "static": 6, "step": 19, "subclass": 70, "summari": 60, "support": [2, 19, 24, 25, 39, 64, 68, 76], "swift": 19, "symmetr": 58, "target": [13, 39, 68], "tensor": [8, 30, 71], "tensorflow": [6, 7, 14, 15, 18, 19, 24, 28, 39, 68, 69, 70, 72], "test": [7, 8, 24], "tf": 15, "than": 20, "third": 25, "time": [34, 52], "toi": [37, 67], "token": 11, "tool": [0, 20, 25, 39, 62, 64, 73], "torch": [8, 20, 21, 24, 40, 41, 47, 52], "torchscript": 13, "torchvis": [8, 20], "trace": [5, 8, 10, 24, 31, 35, 36], "train": [3, 47, 51, 52, 56, 57, 63, 69, 70], "transform": [3, 15, 30, 70], "tree": 18, "tune": [56, 60, 63], "type": [16, 18, 24, 30, 32, 34, 38, 41, 43, 71, 72, 75, 78], "unbound": 21, "unifi": 0, "unit": [28, 34, 38], "unsupport": 20, "untyp": 71, "up": [6, 10, 11, 25], "updat": [18, 21, 29, 74, 76, 77], "upgrad": [23, 25], "us": [3, 6, 19, 20, 21, 24, 26, 29, 30, 32, 34, 35, 40, 60, 67, 69], "user": 70, "util": [18, 30], "v": 26, "valu": 75, "vari": 7, "vector": 48, "version": [20, 25, 39, 40, 68], "virtual": 25, "vision": 1, "visual": 72, "weight": [30, 40, 56, 57, 65], "what": [62, 64], "which": 65, "why": [20, 34, 35, 37], "work": [6, 29, 57, 58], "workflow": [13, 19, 28, 39, 63, 69, 70, 72], "x": 68, "xcode": [1, 5, 16, 18, 24, 26, 78], "xgboost": 79, "your": 31}}) \ No newline at end of file diff --git a/docs-guides/source/classifiers.html b/docs-guides/source/classifiers.html index cf8e9feda..783254220 100644 --- a/docs-guides/source/classifiers.html +++ b/docs-guides/source/classifiers.html @@ -41,7 +41,7 @@ - + @@ -562,7 +562,7 @@

Produce a Classifier Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/comparing-ml-programs-and-neural-networks.html b/docs-guides/source/comparing-ml-programs-and-neural-networks.html index 62336aa93..2a7c13b5d 100644 --- a/docs-guides/source/comparing-ml-programs-and-neural-networks.html +++ b/docs-guides/source/comparing-ml-programs-and-neural-networks.html @@ -41,7 +41,7 @@ - + @@ -604,7 +604,7 @@

Differences in Detail - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/composite-operators.html b/docs-guides/source/composite-operators.html index 854d6e522..1b4422ea7 100644 --- a/docs-guides/source/composite-operators.html +++ b/docs-guides/source/composite-operators.html @@ -41,7 +41,7 @@ - + @@ -617,7 +617,7 @@

More Examples - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/conversion-options.html b/docs-guides/source/conversion-options.html index f65385f84..ba0379a0c 100644 --- a/docs-guides/source/conversion-options.html +++ b/docs-guides/source/conversion-options.html @@ -41,7 +41,7 @@ - + @@ -485,7 +485,7 @@

Conversion Options - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-a-pytorch-segmentation-model.html b/docs-guides/source/convert-a-pytorch-segmentation-model.html index b30df9180..15d6b4a61 100644 --- a/docs-guides/source/convert-a-pytorch-segmentation-model.html +++ b/docs-guides/source/convert-a-pytorch-segmentation-model.html @@ -41,7 +41,7 @@ - + @@ -727,7 +727,7 @@

Convert the Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-a-tensorflow-1-deepspeech-model.html b/docs-guides/source/convert-a-tensorflow-1-deepspeech-model.html index 0242f7351..c9793944a 100644 --- a/docs-guides/source/convert-a-tensorflow-1-deepspeech-model.html +++ b/docs-guides/source/convert-a-tensorflow-1-deepspeech-model.html @@ -41,7 +41,7 @@ - + @@ -675,7 +675,7 @@

Convert a Dynamic Model to a Static One - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-a-tensorflow-1-image-classifier.html b/docs-guides/source/convert-a-tensorflow-1-image-classifier.html index d0a68f2ef..189eab383 100644 --- a/docs-guides/source/convert-a-tensorflow-1-image-classifier.html +++ b/docs-guides/source/convert-a-tensorflow-1-image-classifier.html @@ -41,7 +41,7 @@ - + @@ -745,7 +745,7 @@

Preprocess the Image Before Converting - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-a-torchvision-model-from-pytorch.html b/docs-guides/source/convert-a-torchvision-model-from-pytorch.html index 0203cb938..39be297d7 100644 --- a/docs-guides/source/convert-a-torchvision-model-from-pytorch.html +++ b/docs-guides/source/convert-a-torchvision-model-from-pytorch.html @@ -41,7 +41,7 @@ - + @@ -776,7 +776,7 @@

Make a Prediction with Core ML and Print Outputs - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-learning-models.html b/docs-guides/source/convert-learning-models.html index 8f7d93549..1daca05ee 100644 --- a/docs-guides/source/convert-learning-models.html +++ b/docs-guides/source/convert-learning-models.html @@ -41,7 +41,7 @@ - + @@ -482,7 +482,7 @@

Converting Deep Learning Models - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-nlp-model.html b/docs-guides/source/convert-nlp-model.html index 2268672f1..d3633ff3d 100644 --- a/docs-guides/source/convert-nlp-model.html +++ b/docs-guides/source/convert-nlp-model.html @@ -41,7 +41,7 @@ - + @@ -623,7 +623,7 @@

Run the Converted Core ML Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-openelm.html b/docs-guides/source/convert-openelm.html index 5b2097af8..dfb167bb7 100644 --- a/docs-guides/source/convert-openelm.html +++ b/docs-guides/source/convert-openelm.html @@ -41,7 +41,7 @@ - + @@ -643,7 +643,7 @@

Run the Converted Core ML Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-pytorch-workflow.html b/docs-guides/source/convert-pytorch-workflow.html index 2dc0526db..b37bcaa32 100644 --- a/docs-guides/source/convert-pytorch-workflow.html +++ b/docs-guides/source/convert-pytorch-workflow.html @@ -41,7 +41,7 @@ - + @@ -601,7 +601,7 @@

Convert to Core ML - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-pytorch.html b/docs-guides/source/convert-pytorch.html index 56591bd33..b6603f993 100644 --- a/docs-guides/source/convert-pytorch.html +++ b/docs-guides/source/convert-pytorch.html @@ -41,7 +41,7 @@ - + @@ -482,7 +482,7 @@

Converting from PyTorch - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-tensorflow-2-bert-transformer-models.html b/docs-guides/source/convert-tensorflow-2-bert-transformer-models.html index 134eba090..9dc8e271e 100644 --- a/docs-guides/source/convert-tensorflow-2-bert-transformer-models.html +++ b/docs-guides/source/convert-tensorflow-2-bert-transformer-models.html @@ -41,7 +41,7 @@ - + @@ -618,7 +618,7 @@

Convert the TF Hub BERT Transformer Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-tensorflow.html b/docs-guides/source/convert-tensorflow.html index 0ca812d54..9e77d1d90 100644 --- a/docs-guides/source/convert-tensorflow.html +++ b/docs-guides/source/convert-tensorflow.html @@ -41,7 +41,7 @@ - + @@ -486,7 +486,7 @@

Converting from TensorFlow - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-to-ml-program.html b/docs-guides/source/convert-to-ml-program.html index 03fc96253..fbf607eb8 100644 --- a/docs-guides/source/convert-to-ml-program.html +++ b/docs-guides/source/convert-to-ml-program.html @@ -41,7 +41,7 @@ - + @@ -587,7 +587,7 @@

Availability of ML Programs - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/convert-to-neural-network.html b/docs-guides/source/convert-to-neural-network.html index 48ad8dc3e..e79556ac0 100644 --- a/docs-guides/source/convert-to-neural-network.html +++ b/docs-guides/source/convert-to-neural-network.html @@ -41,7 +41,7 @@ - + @@ -489,7 +489,7 @@

Convert Models to Neural Networks

diff --git a/docs-guides/source/coremltools-examples.html b/docs-guides/source/coremltools-examples.html index 865db26b1..eacc4ee15 100644 --- a/docs-guides/source/coremltools-examples.html +++ b/docs-guides/source/coremltools-examples.html @@ -41,7 +41,7 @@ - + @@ -708,7 +708,7 @@

Updatable Models - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/custom-operators.html b/docs-guides/source/custom-operators.html index da06abc33..6e3fe01dc 100644 --- a/docs-guides/source/custom-operators.html +++ b/docs-guides/source/custom-operators.html @@ -41,7 +41,7 @@ - + @@ -645,7 +645,7 @@

Step 4: Implement Classes in Swift - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/faqs.html b/docs-guides/source/faqs.html index 899baabc3..713d9e5e0 100644 --- a/docs-guides/source/faqs.html +++ b/docs-guides/source/faqs.html @@ -41,7 +41,7 @@ - + @@ -708,7 +708,7 @@

Use a compiled model for faster initialization - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/flexible-inputs.html b/docs-guides/source/flexible-inputs.html index a85e7c3b0..7c3b113b6 100644 --- a/docs-guides/source/flexible-inputs.html +++ b/docs-guides/source/flexible-inputs.html @@ -41,7 +41,7 @@ - + @@ -709,7 +709,7 @@

API Reference - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/graph-passes-intro.html b/docs-guides/source/graph-passes-intro.html index 74ac6d607..f1569a0c7 100644 --- a/docs-guides/source/graph-passes-intro.html +++ b/docs-guides/source/graph-passes-intro.html @@ -41,7 +41,7 @@ - + @@ -475,7 +475,7 @@

Graph Passes

diff --git a/docs-guides/source/how-to-contribute.html b/docs-guides/source/how-to-contribute.html index 345227045..bc9f6ae7e 100644 --- a/docs-guides/source/how-to-contribute.html +++ b/docs-guides/source/how-to-contribute.html @@ -41,7 +41,7 @@ - + @@ -582,7 +582,7 @@

Labels# - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/image-inputs.html b/docs-guides/source/image-inputs.html index c9dc52e9d..5c0dd2d51 100644 --- a/docs-guides/source/image-inputs.html +++ b/docs-guides/source/image-inputs.html @@ -41,7 +41,7 @@ - + @@ -854,7 +854,7 @@

Preprocessing for Torch - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/installing-coremltools.html b/docs-guides/source/installing-coremltools.html index 94d31f1e2..4f85109e2 100644 --- a/docs-guides/source/installing-coremltools.html +++ b/docs-guides/source/installing-coremltools.html @@ -41,7 +41,7 @@ - + @@ -641,7 +641,7 @@

Upgrade Core ML Tools - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/introductory-quickstart.html b/docs-guides/source/introductory-quickstart.html index ba43b8bc2..9d120f5fe 100644 --- a/docs-guides/source/introductory-quickstart.html +++ b/docs-guides/source/introductory-quickstart.html @@ -41,7 +41,7 @@ - + @@ -750,7 +750,7 @@

Produce a Neural Network - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/libsvm-conversion.html b/docs-guides/source/libsvm-conversion.html index 3d0de5ba4..a36e9389c 100644 --- a/docs-guides/source/libsvm-conversion.html +++ b/docs-guides/source/libsvm-conversion.html @@ -41,7 +41,7 @@ - + @@ -489,7 +489,7 @@

LibSVM

diff --git a/docs-guides/source/load-and-convert-model.html b/docs-guides/source/load-and-convert-model.html index c998e3581..fa94e7142 100644 --- a/docs-guides/source/load-and-convert-model.html +++ b/docs-guides/source/load-and-convert-model.html @@ -41,7 +41,7 @@ - + @@ -638,7 +638,7 @@

Convert From TensorFlow 1 - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

diff --git a/docs-guides/source/mlmodel-utilities.html b/docs-guides/source/mlmodel-utilities.html index c7536c87e..a226cde12 100644 --- a/docs-guides/source/mlmodel-utilities.html +++ b/docs-guides/source/mlmodel-utilities.html @@ -41,7 +41,7 @@ - + @@ -420,6 +420,8 @@

Contents

  • Bisect Model
  • Change Model Tensor Input/Output Types
  • +
  • Compute Plan
  • +
  • In-memory Model
  • @@ -615,7 +617,7 @@

    Change Model Tensor Input/Output Types
    from coremltools.models.model import MLModel
    -from coremltools.utils import change_array_output_type
    +from coremltools.utils import change_input_output_tensor_type
     from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType
     
     model = MLModel("my_model.mlpackage")
    @@ -629,7 +631,7 @@ 

    Change Model Tensor Input/Output Types
    from coremltools.models.model import MLModel
    -from coremltools.utils import change_array_output_type
    +from coremltools.utils import change_input_output_tensor_type
     from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType
     
     model = MLModel("my_model.mlpackage")
    @@ -656,6 +658,83 @@ 

    Change Model Tensor Input/Output Types"*" string means all relevant inputs/outputs will be modified (those that will match the from_type type)

    +
    +

    Compute Plan#

    +

    In certain situations, you may want to evaluate the computational needs of a Core ML model before deploying it. +The MLComputePlan class is designed for this purpose, allowing you to get insights into the resources and costs +associated with using the model.

    +

    Here’s what you can do with MLComputePlan:

    +
      +
    • Model Structure: Examine the model structure.

    • +
• Compute Device Usage: Get insights into the compute devices that would be used to execute an ML Program operation or a NeuralNetwork layer.

    • +
    • Estimated Cost: Get the estimated cost of executing an ML Program operation.

    • +
    +

An example of how to use MLComputePlan to get the estimated cost and compute device usage for the operations in an ML Program:

    +
    import coremltools as ct
    +# Path to the compiled ML Program model.
    +compiled_model_path = "my_model.mlmodelc"
    +# Load the compute plan of a model.
+compute_plan = ct.models.compute_plan.MLComputePlan.load_from_path(
    +    path=compiled_model_path,
    +    compute_units=ct.ComputeUnits.ALL,
    +)
    +# Get the model structure.
    +program = compute_plan.model_structure.program
+main_function = program.functions["main"]
+for operation in main_function.block.operations:
    +    # Get the compute device usage for the operation.
    +    compute_device_usage = (
    +        compute_plan.get_compute_device_usage_for_mlprogram_operation(operation)
    +    )
    +    # Get the estimated cost of executing the operation.
    +    estimated_cost = compute_plan.get_estimated_cost_for_mlprogram_operation(operation)
    +
    +
    +
    +
    +
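To act on these results, for example to log which compute device would run each operation, you can inspect the objects returned inside the loop above. The following is a minimal sketch, not part of the original example: it assumes the device-usage object exposes preferred_compute_device and supported_compute_devices attributes and the cost object exposes a weight attribute (the operation's estimated share of the model's total cost); verify the exact field names in the coremltools.models.compute_plan module of your installed version.
+    # Continuing the loop body above. Attribute names are assumptions;
+    # check coremltools.models.compute_plan for the exact API.
+    if compute_device_usage is not None:
+        print("Preferred device:", compute_device_usage.preferred_compute_device)
+        print("Supported devices:", compute_device_usage.supported_compute_devices)
+    if estimated_cost is not None:
+        # `weight` is the operation's estimated share of the total model cost.
+        print("Estimated cost weight:", estimated_cost.weight)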

    In-memory Model#

    +

    If you are using an in-memory model in your application, you can easily test the workflow with MLModelAsset. The MLModelAsset class includes +the MLModelAsset.from_memory API, which enables you to load a model directly from the model’s in-memory specification data. Once loaded, you +can use the model to make predictions.

    +

An example of how to use MLModelAsset to load a CompiledMLModel from in-memory specification data:

    +
+import coremltools as ct
+import numpy as np
+from coremltools.models import MLModel
+# Load the model and serialize its specification.
+model = MLModel("my_model.model")
    +model_spec = model.get_spec()
    +spec_data = model_spec.SerializeToString()
    +asset = ct.models.model.MLModelAsset.from_memory(spec_data=spec_data)
    +compiled_model = ct.models.CompiledMLModel.from_asset(asset=asset)
    +result = compiled_model.predict(
    +    {
    +        "x": np.array([1.0]),
    +        "y": np.array([2.0]),
    +    }
    +)
    +
    +
    +

Another example of how to use MLModelAsset to load a CompiledMLModel from in-memory specification data where the specification has external blob file references:

    +
+import coremltools as ct
+import numpy as np
+from coremltools.models import MLModel
+# Load the model package and read its weight file.
+mlmodel = MLModel("my_model.mlpackage")
+weight_file_path = mlmodel.weights_dir + "/weight.bin"
+with open(weight_file_path, "rb") as file:
+    weights_data = file.read()
+model_spec = mlmodel.get_spec()
+spec_data = model_spec.SerializeToString()
+# Provide the weights data as `blob_mapping`.
+asset = ct.models.model.MLModelAsset.from_memory(
+    spec_data=spec_data, blob_mapping={"weights/weight.bin": weights_data}
+)
+compiled_model = ct.models.CompiledMLModel.from_asset(asset=asset)
+result = compiled_model.predict(
+    {
+        "x": np.array([1.0]),
+        "y": np.array([2.0]),
+    }
+)
    +
    +
    +
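If the model has several weight files, you can build the blob_mapping generically instead of hard-coding a single file. The following is a minimal sketch, not part of the original example, assuming (as in the example above) that the mapping keys are "weights/<file name>" paths relative to the model package's data directory:
+import os
+
+# Read every file under the model's weights directory into the mapping.
+blob_mapping = {}
+for file_name in os.listdir(mlmodel.weights_dir):
+    weight_path = os.path.join(mlmodel.weights_dir, file_name)
+    with open(weight_path, "rb") as f:
+        blob_mapping["weights/" + file_name] = f.read()
+
+asset = ct.models.model.MLModelAsset.from_memory(
+    spec_data=spec_data, blob_mapping=blob_mapping
+)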
    @@ -714,6 +793,8 @@

    Change Model Tensor Input/Output TypesBisect Model
  • Change Model Tensor Input/Output Types
  • +
  • Compute Plan
  • +
  • In-memory Model
  • @@ -738,7 +819,7 @@

    Change Model Tensor Input/Output Types - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/mlmodel.html b/docs-guides/source/mlmodel.html index 3a491ba6a..860737ec1 100644 --- a/docs-guides/source/mlmodel.html +++ b/docs-guides/source/mlmodel.html @@ -41,7 +41,7 @@ - + @@ -608,7 +608,7 @@

    Update the Metadata and Input/Output Descriptions - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-exporting.html b/docs-guides/source/model-exporting.html index 519e54fbf..a8bf5205c 100644 --- a/docs-guides/source/model-exporting.html +++ b/docs-guides/source/model-exporting.html @@ -41,7 +41,7 @@ - + @@ -611,7 +611,7 @@

    Difference from Tracing - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-input-and-output-types.html b/docs-guides/source/model-input-and-output-types.html index 146be2ee1..2fb4795d0 100644 --- a/docs-guides/source/model-input-and-output-types.html +++ b/docs-guides/source/model-input-and-output-types.html @@ -41,7 +41,7 @@ - + @@ -548,7 +548,7 @@

    Provide the Shape of the Input - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-intermediate-language.html b/docs-guides/source/model-intermediate-language.html index 161e0b4d2..18b9f3340 100644 --- a/docs-guides/source/model-intermediate-language.html +++ b/docs-guides/source/model-intermediate-language.html @@ -41,7 +41,7 @@ - + @@ -580,7 +580,7 @@

    Convert MIL to Core ML - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-prediction.html b/docs-guides/source/model-prediction.html index 8b3124b12..7b162de2a 100644 --- a/docs-guides/source/model-prediction.html +++ b/docs-guides/source/model-prediction.html @@ -41,7 +41,7 @@ - + @@ -782,7 +782,7 @@

    Timing Example - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-scripting.html b/docs-guides/source/model-scripting.html index cb58691cb..5872c1a21 100644 --- a/docs-guides/source/model-scripting.html +++ b/docs-guides/source/model-scripting.html @@ -41,7 +41,7 @@ - + @@ -586,7 +586,7 @@

    Mix Tracing and Scripting - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/model-tracing.html b/docs-guides/source/model-tracing.html index 0fec62477..81e01d85f 100644 --- a/docs-guides/source/model-tracing.html +++ b/docs-guides/source/model-tracing.html @@ -41,7 +41,7 @@ - + @@ -536,7 +536,7 @@

    Model Tracing

    diff --git a/docs-guides/source/multifunction-models.html b/docs-guides/source/multifunction-models.html index ffc0c2aa6..2b882a25f 100644 --- a/docs-guides/source/multifunction-models.html +++ b/docs-guides/source/multifunction-models.html @@ -41,7 +41,7 @@ - + @@ -677,7 +677,7 @@

    Combining models: toy example with LoRA adapters - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/new-conversion-options.html b/docs-guides/source/new-conversion-options.html index 81acd10f0..dc1ae7464 100644 --- a/docs-guides/source/new-conversion-options.html +++ b/docs-guides/source/new-conversion-options.html @@ -41,7 +41,7 @@ - + @@ -525,7 +525,7 @@

    Pick the Compute Units for Execution - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/new-features.html b/docs-guides/source/new-features.html index be4bd9dd8..4f027ae51 100644 --- a/docs-guides/source/new-features.html +++ b/docs-guides/source/new-features.html @@ -41,7 +41,7 @@ - + @@ -609,7 +609,7 @@

    Deprecated Methods and Support - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-conversion.html b/docs-guides/source/opt-conversion.html index bde56b91c..e18c01440 100644 --- a/docs-guides/source/opt-conversion.html +++ b/docs-guides/source/opt-conversion.html @@ -41,7 +41,7 @@ - + @@ -940,7 +940,7 @@

    Version 1 - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-joint-compression.html b/docs-guides/source/opt-joint-compression.html index 0708211b4..5e8c55236 100644 --- a/docs-guides/source/opt-joint-compression.html +++ b/docs-guides/source/opt-joint-compression.html @@ -41,7 +41,7 @@ - + @@ -845,7 +845,7 @@

    Joint sparsity and palettization - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-opt1_3.html b/docs-guides/source/opt-opt1_3.html index e816f0072..2da21c175 100644 --- a/docs-guides/source/opt-opt1_3.html +++ b/docs-guides/source/opt-opt1_3.html @@ -41,7 +41,7 @@ - + @@ -988,7 +988,7 @@

    Conclusions - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-overview-examples.html b/docs-guides/source/opt-overview-examples.html index 4b0ec8c5a..c2a3ac63e 100644 --- a/docs-guides/source/opt-overview-examples.html +++ b/docs-guides/source/opt-overview-examples.html @@ -41,7 +41,7 @@ - + @@ -484,7 +484,7 @@

    Examples diff --git a/docs-guides/source/opt-overview.html b/docs-guides/source/opt-overview.html index a7d138619..0bcf8d843 100644 --- a/docs-guides/source/opt-overview.html +++ b/docs-guides/source/opt-overview.html @@ -41,7 +41,7 @@ - + @@ -617,7 +617,7 @@

    Availability of features - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-palettization-algos.html b/docs-guides/source/opt-palettization-algos.html index e932a8bd4..6e50ce215 100644 --- a/docs-guides/source/opt-palettization-algos.html +++ b/docs-guides/source/opt-palettization-algos.html @@ -41,7 +41,7 @@ - + @@ -689,7 +689,7 @@

    Results#< diff --git a/docs-guides/source/opt-palettization-api.html b/docs-guides/source/opt-palettization-api.html index 0a1e97ae8..18cf988b5 100644 --- a/docs-guides/source/opt-palettization-api.html +++ b/docs-guides/source/opt-palettization-api.html @@ -41,7 +41,7 @@ - + @@ -769,7 +769,7 @@

    Converting the Palettized PyTorch Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-palettization-overview.html b/docs-guides/source/opt-palettization-overview.html index 0d1ec5984..4e4c1ec1f 100644 --- a/docs-guides/source/opt-palettization-overview.html +++ b/docs-guides/source/opt-palettization-overview.html @@ -41,7 +41,7 @@ - + @@ -542,7 +542,7 @@

    Quantizing the LUT - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-palettization-perf.html b/docs-guides/source/opt-palettization-perf.html index 7c02b58ef..f9d9b9eb7 100644 --- a/docs-guides/source/opt-palettization-perf.html +++ b/docs-guides/source/opt-palettization-perf.html @@ -41,7 +41,7 @@ - + @@ -711,7 +711,7 @@

    Results#< diff --git a/docs-guides/source/opt-palettization.html b/docs-guides/source/opt-palettization.html index fc40bd15a..20695a76b 100644 --- a/docs-guides/source/opt-palettization.html +++ b/docs-guides/source/opt-palettization.html @@ -41,7 +41,7 @@ - + @@ -479,7 +479,7 @@

    Palettization - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-pruning-algos.html b/docs-guides/source/opt-pruning-algos.html index 4f565c4d3..92db771c6 100644 --- a/docs-guides/source/opt-pruning-algos.html +++ b/docs-guides/source/opt-pruning-algos.html @@ -41,7 +41,7 @@ - + @@ -655,7 +655,7 @@

    Results#< diff --git a/docs-guides/source/opt-pruning-api.html b/docs-guides/source/opt-pruning-api.html index db78c467d..ccc14a6c0 100644 --- a/docs-guides/source/opt-pruning-api.html +++ b/docs-guides/source/opt-pruning-api.html @@ -41,7 +41,7 @@ - + @@ -675,7 +675,7 @@

    Converting Torch models to Core ML - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-pruning-overview.html b/docs-guides/source/opt-pruning-overview.html index 38c388d43..c54a21b59 100644 --- a/docs-guides/source/opt-pruning-overview.html +++ b/docs-guides/source/opt-pruning-overview.html @@ -41,7 +41,7 @@ - + @@ -517,7 +517,7 @@

    Overview diff --git a/docs-guides/source/opt-pruning-perf.html b/docs-guides/source/opt-pruning-perf.html index 1c0ad11e0..c6aad6ce5 100644 --- a/docs-guides/source/opt-pruning-perf.html +++ b/docs-guides/source/opt-pruning-perf.html @@ -41,7 +41,7 @@ - + @@ -601,7 +601,7 @@

    Results#< diff --git a/docs-guides/source/opt-pruning.html b/docs-guides/source/opt-pruning.html index da6bcc64a..7f6cd9a5e 100644 --- a/docs-guides/source/opt-pruning.html +++ b/docs-guides/source/opt-pruning.html @@ -41,7 +41,7 @@ - + @@ -479,7 +479,7 @@

    Pruning#< diff --git a/docs-guides/source/opt-quantization-algos.html b/docs-guides/source/opt-quantization-algos.html index b82743837..ade2253ed 100644 --- a/docs-guides/source/opt-quantization-algos.html +++ b/docs-guides/source/opt-quantization-algos.html @@ -41,7 +41,7 @@ - + @@ -632,7 +632,7 @@

    Accuracy data - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-quantization-api.html b/docs-guides/source/opt-quantization-api.html index b4e500d0a..ad9471d53 100644 --- a/docs-guides/source/opt-quantization-api.html +++ b/docs-guides/source/opt-quantization-api.html @@ -41,7 +41,7 @@ - + @@ -784,7 +784,7 @@

    Converting quantized PyTorch models to Core ML - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-quantization-overview.html b/docs-guides/source/opt-quantization-overview.html index 073320094..385bc9bcb 100644 --- a/docs-guides/source/opt-quantization-overview.html +++ b/docs-guides/source/opt-quantization-overview.html @@ -41,7 +41,7 @@ - + @@ -557,7 +557,7 @@

    Activation Quantization - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-quantization-perf.html b/docs-guides/source/opt-quantization-perf.html index 18bfe6609..570ef21a7 100644 --- a/docs-guides/source/opt-quantization-perf.html +++ b/docs-guides/source/opt-quantization-perf.html @@ -41,7 +41,7 @@ - + @@ -662,7 +662,7 @@

    Results#< diff --git a/docs-guides/source/opt-quantization.html b/docs-guides/source/opt-quantization.html index d6388e387..0f2830bdd 100644 --- a/docs-guides/source/opt-quantization.html +++ b/docs-guides/source/opt-quantization.html @@ -41,7 +41,7 @@ - + @@ -479,7 +479,7 @@

    Linear Quantization - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-resnet.html b/docs-guides/source/opt-resnet.html index bac88952e..b7167dadf 100644 --- a/docs-guides/source/opt-resnet.html +++ b/docs-guides/source/opt-resnet.html @@ -41,7 +41,7 @@ - + @@ -836,7 +836,7 @@

    Summary#< diff --git a/docs-guides/source/opt-stable-diffusion.html b/docs-guides/source/opt-stable-diffusion.html index 5a3d2ca50..e762c4494 100644 --- a/docs-guides/source/opt-stable-diffusion.html +++ b/docs-guides/source/opt-stable-diffusion.html @@ -41,7 +41,7 @@ - + @@ -741,7 +741,7 @@

    Results#< diff --git a/docs-guides/source/opt-whats-new.html b/docs-guides/source/opt-whats-new.html index c7e1639e8..9ab6868e5 100644 --- a/docs-guides/source/opt-whats-new.html +++ b/docs-guides/source/opt-whats-new.html @@ -41,7 +41,7 @@ - + @@ -638,7 +638,7 @@

    Core ML Tools 7 - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/opt-workflow.html b/docs-guides/source/opt-workflow.html index d387ae044..4bce997bc 100644 --- a/docs-guides/source/opt-workflow.html +++ b/docs-guides/source/opt-workflow.html @@ -41,7 +41,7 @@ - + @@ -722,7 +722,7 @@

    With fine-tuning - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/overview-coremltools.html b/docs-guides/source/overview-coremltools.html index 6b3522440..e11286578 100644 --- a/docs-guides/source/overview-coremltools.html +++ b/docs-guides/source/overview-coremltools.html @@ -41,7 +41,7 @@ - + @@ -555,7 +555,7 @@

    Supported Libraries and Frameworks - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/quantization-neural-network.html b/docs-guides/source/quantization-neural-network.html index 17184ee5b..cacba99fb 100644 --- a/docs-guides/source/quantization-neural-network.html +++ b/docs-guides/source/quantization-neural-network.html @@ -41,7 +41,7 @@ - + @@ -623,7 +623,7 @@

    Control Which Layers are Quantized - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/sci-kit-learn-conversion.html b/docs-guides/source/sci-kit-learn-conversion.html index b1703ca3e..d0b01935d 100644 --- a/docs-guides/source/sci-kit-learn-conversion.html +++ b/docs-guides/source/sci-kit-learn-conversion.html @@ -41,7 +41,7 @@ - + @@ -490,7 +490,7 @@

    Scikit-learn

    diff --git a/docs-guides/source/stateful-models.html b/docs-guides/source/stateful-models.html index d3bdc8b7f..b3eff7fca 100644 --- a/docs-guides/source/stateful-models.html +++ b/docs-guides/source/stateful-models.html @@ -41,7 +41,7 @@ - + @@ -915,7 +915,7 @@

    Example: Toy Attention Model with Stateful KV-Cache - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/target-conversion-formats.html b/docs-guides/source/target-conversion-formats.html index c40d60aba..3550f5c02 100644 --- a/docs-guides/source/target-conversion-formats.html +++ b/docs-guides/source/target-conversion-formats.html @@ -41,7 +41,7 @@ - + @@ -663,7 +663,7 @@

    Performance Improvements - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/tensorflow-1-workflow.html b/docs-guides/source/tensorflow-1-workflow.html index c0b9a3cef..bb45fec60 100644 --- a/docs-guides/source/tensorflow-1-workflow.html +++ b/docs-guides/source/tensorflow-1-workflow.html @@ -41,7 +41,7 @@ - + @@ -669,7 +669,7 @@

    More Examples - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/tensorflow-2.html b/docs-guides/source/tensorflow-2.html index 5ffb1391a..e3fdb9551 100644 --- a/docs-guides/source/tensorflow-2.html +++ b/docs-guides/source/tensorflow-2.html @@ -41,7 +41,7 @@ - + @@ -691,7 +691,7 @@

    Contents

    diff --git a/docs-guides/source/typed-execution-example.html b/docs-guides/source/typed-execution-example.html index 9f5176d52..d6e6f6c16 100644 --- a/docs-guides/source/typed-execution-example.html +++ b/docs-guides/source/typed-execution-example.html @@ -41,7 +41,7 @@ - + @@ -705,7 +705,7 @@

    Make a Visual Comparison - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/typed-execution.html b/docs-guides/source/typed-execution.html index 902b48147..592f37164 100644 --- a/docs-guides/source/typed-execution.html +++ b/docs-guides/source/typed-execution.html @@ -41,7 +41,7 @@ - + @@ -565,7 +565,7 @@

    ML Program Typed Tensors - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/unified-conversion-api.html b/docs-guides/source/unified-conversion-api.html index 6da360c5b..58bb40e1b 100644 --- a/docs-guides/source/unified-conversion-api.html +++ b/docs-guides/source/unified-conversion-api.html @@ -41,7 +41,7 @@ - + @@ -495,7 +495,7 @@

    Core ML Tools API Overview - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/updatable-model-examples.html b/docs-guides/source/updatable-model-examples.html index 758b06ec9..b110ee8cc 100644 --- a/docs-guides/source/updatable-model-examples.html +++ b/docs-guides/source/updatable-model-examples.html @@ -41,7 +41,7 @@ - + @@ -480,7 +480,7 @@

    Updatable Models - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/updatable-nearest-neighbor-classifier.html b/docs-guides/source/updatable-nearest-neighbor-classifier.html index b342106fb..293bc5820 100644 --- a/docs-guides/source/updatable-nearest-neighbor-classifier.html +++ b/docs-guides/source/updatable-nearest-neighbor-classifier.html @@ -41,7 +41,7 @@ - + @@ -664,7 +664,7 @@

    Set the Index Type - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/updatable-neural-network-classifier-on-mnist-dataset.html b/docs-guides/source/updatable-neural-network-classifier-on-mnist-dataset.html index 04f717af0..6e048beaa 100644 --- a/docs-guides/source/updatable-neural-network-classifier-on-mnist-dataset.html +++ b/docs-guides/source/updatable-neural-network-classifier-on-mnist-dataset.html @@ -41,7 +41,7 @@ - + @@ -750,7 +750,7 @@

    Make the Model Updatable - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/updatable-tiny-drawing-classifier-pipeline-model.html b/docs-guides/source/updatable-tiny-drawing-classifier-pipeline-model.html index 3a8c3f476..6d02c421f 100644 --- a/docs-guides/source/updatable-tiny-drawing-classifier-pipeline-model.html +++ b/docs-guides/source/updatable-tiny-drawing-classifier-pipeline-model.html @@ -41,7 +41,7 @@ - + @@ -650,7 +650,7 @@

    Create an Updatable Pipeline Model - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/xcode-model-preview-types.html b/docs-guides/source/xcode-model-preview-types.html index 6460657a8..a0f6973a8 100644 --- a/docs-guides/source/xcode-model-preview-types.html +++ b/docs-guides/source/xcode-model-preview-types.html @@ -41,7 +41,7 @@ - + @@ -663,7 +663,7 @@

    Preview the Model in Xcode - © Copyright 2023, Apple Inc. + © Copyright 2024, Apple Inc.

    diff --git a/docs-guides/source/xgboost-conversion.html b/docs-guides/source/xgboost-conversion.html index 64c714039..14fc29910 100644 --- a/docs-guides/source/xgboost-conversion.html +++ b/docs-guides/source/xgboost-conversion.html @@ -41,7 +41,7 @@ - + @@ -481,7 +481,7 @@

    XGBoost

    diff --git a/docs/_build/html/.buildinfo b/docs/.buildinfo similarity index 82% rename from docs/_build/html/.buildinfo rename to docs/.buildinfo index 47c061afa..cdc2cc76e 100644 --- a/docs/_build/html/.buildinfo +++ b/docs/.buildinfo @@ -1,4 +1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: 1e4a6f4eed3620cbf17ca042f5c721d4 +config: 8367d97272c196a64a981b0863c55dc8 tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/_build/doctrees/_examples/dkm_palettization.doctree b/docs/_build/doctrees/_examples/dkm_palettization.doctree deleted file mode 100644 index 82504ade8..000000000 Binary files a/docs/_build/doctrees/_examples/dkm_palettization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/_examples/index.doctree b/docs/_build/doctrees/_examples/index.doctree deleted file mode 100644 index eb587a02e..000000000 Binary files a/docs/_build/doctrees/_examples/index.doctree and /dev/null differ diff --git a/docs/_build/doctrees/_examples/linear_quantization.doctree b/docs/_build/doctrees/_examples/linear_quantization.doctree deleted file mode 100644 index 28ee6370c..000000000 Binary files a/docs/_build/doctrees/_examples/linear_quantization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/_examples/magnitude_pruning.doctree b/docs/_build/doctrees/_examples/magnitude_pruning.doctree deleted file mode 100644 index 7e828c3e5..000000000 Binary files a/docs/_build/doctrees/_examples/magnitude_pruning.doctree and /dev/null differ diff --git a/docs/_build/doctrees/_examples/sg_execution_times.doctree b/docs/_build/doctrees/_examples/sg_execution_times.doctree deleted file mode 100644 index 17e5c9ee7..000000000 Binary files a/docs/_build/doctrees/_examples/sg_execution_times.doctree and /dev/null differ diff --git a/docs/_build/doctrees/environment.pickle b/docs/_build/doctrees/environment.pickle deleted file mode 100644 index 8e112e423..000000000 Binary files a/docs/_build/doctrees/environment.pickle and /dev/null differ diff --git a/docs/_build/doctrees/index.doctree b/docs/_build/doctrees/index.doctree deleted file mode 100644 index 46c0f1f6a..000000000 Binary files a/docs/_build/doctrees/index.doctree and /dev/null differ diff --git a/docs/_build/doctrees/sg_execution_times.doctree b/docs/_build/doctrees/sg_execution_times.doctree deleted file mode 100644 index 26d2afb71..000000000 Binary files a/docs/_build/doctrees/sg_execution_times.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/api-versions.doctree b/docs/_build/doctrees/source/api-versions.doctree deleted file mode 100644 index cab1c5672..000000000 Binary files a/docs/_build/doctrees/source/api-versions.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.convert.doctree b/docs/_build/doctrees/source/coremltools.converters.convert.doctree deleted file mode 100644 index 3038afbf7..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.convert.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.doctree b/docs/_build/doctrees/source/coremltools.converters.doctree deleted file mode 100644 index f38dea73e..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.libsvm.doctree b/docs/_build/doctrees/source/coremltools.converters.libsvm.doctree deleted file mode 
100644 index 04eb828a9..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.libsvm.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.mil.doctree b/docs/_build/doctrees/source/coremltools.converters.mil.doctree deleted file mode 100644 index e90a4a993..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.mil.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.mil.input_types.doctree b/docs/_build/doctrees/source/coremltools.converters.mil.input_types.doctree deleted file mode 100644 index 111a166d7..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.mil.input_types.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.mil.mil.ops.defs.doctree b/docs/_build/doctrees/source/coremltools.converters.mil.mil.ops.defs.doctree deleted file mode 100644 index 9664b7b4c..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.mil.mil.ops.defs.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.mil.mil.passes.defs.doctree b/docs/_build/doctrees/source/coremltools.converters.mil.mil.passes.defs.doctree deleted file mode 100644 index 620bd1c53..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.mil.mil.passes.defs.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.sklearn.doctree b/docs/_build/doctrees/source/coremltools.converters.sklearn.doctree deleted file mode 100644 index 2ea0aeee9..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.sklearn.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.converters.xgboost.doctree b/docs/_build/doctrees/source/coremltools.converters.xgboost.doctree deleted file mode 100644 index 8970fc23e..000000000 Binary files a/docs/_build/doctrees/source/coremltools.converters.xgboost.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.models.doctree b/docs/_build/doctrees/source/coremltools.models.doctree deleted file mode 100644 index c5e414322..000000000 Binary files a/docs/_build/doctrees/source/coremltools.models.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.models.ml_program.doctree b/docs/_build/doctrees/source/coremltools.models.ml_program.doctree deleted file mode 100644 index a5ffab1c8..000000000 Binary files a/docs/_build/doctrees/source/coremltools.models.ml_program.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.models.neural_network.doctree b/docs/_build/doctrees/source/coremltools.models.neural_network.doctree deleted file mode 100644 index 5e6015044..000000000 Binary files a/docs/_build/doctrees/source/coremltools.models.neural_network.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.coreml.palettization.doctree b/docs/_build/doctrees/source/coremltools.optimize.coreml.palettization.doctree deleted file mode 100644 index 666c5b0f8..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.coreml.palettization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.coreml.post_training_quantization.doctree b/docs/_build/doctrees/source/coremltools.optimize.coreml.post_training_quantization.doctree deleted file mode 100644 index d3ec38691..000000000 Binary files 
a/docs/_build/doctrees/source/coremltools.optimize.coreml.post_training_quantization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.coreml.pruning.doctree b/docs/_build/doctrees/source/coremltools.optimize.coreml.pruning.doctree deleted file mode 100644 index e934a4c11..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.coreml.pruning.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.coreml.quantization.doctree b/docs/_build/doctrees/source/coremltools.optimize.coreml.quantization.doctree deleted file mode 100644 index 08988c1e6..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.coreml.quantization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.coreml.utilities.doctree b/docs/_build/doctrees/source/coremltools.optimize.coreml.utilities.doctree deleted file mode 100644 index 83370a3e4..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.coreml.utilities.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.doctree b/docs/_build/doctrees/source/coremltools.optimize.doctree deleted file mode 100644 index fee0f6e47..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.torch.examples.doctree b/docs/_build/doctrees/source/coremltools.optimize.torch.examples.doctree deleted file mode 100644 index a7341ff22..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.torch.examples.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.torch.palettization.doctree b/docs/_build/doctrees/source/coremltools.optimize.torch.palettization.doctree deleted file mode 100644 index 67a53fdbb..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.torch.palettization.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.torch.pruning.doctree b/docs/_build/doctrees/source/coremltools.optimize.torch.pruning.doctree deleted file mode 100644 index 443c97dfc..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.torch.pruning.doctree and /dev/null differ diff --git a/docs/_build/doctrees/source/coremltools.optimize.torch.quantization.doctree b/docs/_build/doctrees/source/coremltools.optimize.torch.quantization.doctree deleted file mode 100644 index f25881218..000000000 Binary files a/docs/_build/doctrees/source/coremltools.optimize.torch.quantization.doctree and /dev/null differ diff --git a/docs/_build/html/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb b/docs/_build/html/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb deleted file mode 100644 index 6639773e5..000000000 --- a/docs/_build/html/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb +++ /dev/null @@ -1,219 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Linear Quantization\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this tutorial, you learn how to train a simple convolutional neural network on\n[MNIST](http://yann.lecun.com/exdb/mnist/) using :py:class:`~.quantization.LinearQuantizer`.\n\nLearn more about other quantization in the coremltools \n[Training-Time Quantization 
Documentation](https://coremltools.readme.io/v7.0/docs/data-dependent-quantization).\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Network and Dataset Definition\nFirst define your network, which consists of a single convolution layer\nfollowed by a dense (linear) layer.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from collections import OrderedDict\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\ndef mnist_net(num_classes=10):\n return nn.Sequential(\n OrderedDict(\n [\n (\"conv\", nn.Conv2d(1, 12, 3, padding=1)),\n (\"relu\", nn.ReLU()),\n (\"pool\", nn.MaxPool2d(2, stride=2, padding=0)),\n (\"flatten\", nn.Flatten()),\n (\"dense\", nn.Linear(2352, num_classes)),\n (\"softmax\", nn.LogSoftmax()),\n ]\n )\n )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use the [MNIST dataset provided by PyTorch](https://pytorch.org/vision/stable/generated/torchvision.datasets.MNIST.html#mnist)\nfor training. Apply a very simple transformation to the input\nimages to normalize them.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import os\n\nfrom torchvision import datasets, transforms\n\n\ndef mnist_dataset(data_dir=\"~/.mnist_qat_data\"):\n transform = transforms.Compose(\n [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]\n )\n data_path = os.path.expanduser(f\"{data_dir}/mnist\")\n if not os.path.exists(data_path):\n os.makedirs(data_path)\n train = datasets.MNIST(data_path, train=True, download=True, transform=transform)\n test = datasets.MNIST(data_path, train=False, transform=transform)\n return train, test" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, initialize the model and the dataset.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model = mnist_net()\n\nbatch_size = 128\ntrain_dataset, test_dataset = mnist_dataset()\ntrain_loader = torch.utils.data.DataLoader(\n train_dataset, batch_size=batch_size, shuffle=True\n)\ntest_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Training the Model Without Quantization\nTrain the model without any quantization applied.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "optimizer = torch.optim.Adam(model.parameters(), eps=1e-07)\naccuracy_unquantized = 0.0\nnum_epochs = 4\n\n\ndef train_step(model, optimizer, train_loader, data, target, batch_idx, epoch):\n optimizer.zero_grad()\n output = model(data)\n loss = F.nll_loss(output, target)\n loss.backward()\n optimizer.step()\n if batch_idx % 100 == 0:\n print(\n \"Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}\".format(\n epoch,\n batch_idx * len(data),\n len(train_loader.dataset),\n 100.0 * batch_idx / len(train_loader),\n loss.item(),\n )\n )\n\n\ndef eval_model(model, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction=\"sum\").item()\n pred = output.argmax(dim=1, keepdim=True)\n correct += 
pred.eq(target.view_as(pred)).sum().item()\n\n test_loss /= len(test_loader.dataset)\n accuracy = 100.0 * correct / len(test_loader.dataset)\n\n print(\n \"\\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\\n\".format(\n test_loss, accuracy\n )\n )\n return accuracy\n\n\nfor epoch in range(num_epochs):\n # train one epoch\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n train_step(model, optimizer, train_loader, data, target, batch_idx, epoch)\n\n # evaluate\n accuracy_unquantized = eval_model(model, test_loader)\n\n\nprint(\"Accuracy of unquantized network: {:.1f}%\\n\".format(accuracy_unquantized))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Insert Quantization Layers in the Model\nInstall :py:class:`~.quantization.LinearQuantizer` in the trained model.\n\nCreate an instance of the :py:class:`~.quantization.LinearQuantizerConfig` class\nto specify quantization parameters. ``milestones=[0, 1, 2, 1]`` refers to the following:\n\n* *Index 0*: At 0th epoch, observers will start collecting statistics of values of tensors being quantized\n* *Index 1*: At 1st epoch, quantization simulation will begin\n* *Index 2*: At 2nd epoch, observers will stop collecting and quantization parameters will be frozen\n* *Index 3*: At 1st epoch, batch normalization layers will stop collecting mean and variance, and will start running in inference mode\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from coremltools.optimize.torch.quantization import (\n LinearQuantizer,\n LinearQuantizerConfig,\n ModuleLinearQuantizerConfig,\n)\n\nglobal_config = ModuleLinearQuantizerConfig(milestones=[0, 1, 2, 1])\nconfig = LinearQuantizerConfig(global_config=global_config)\n\nquantizer = LinearQuantizer(model, config)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, call :py:meth:`~.quantization.LinearQuantizer.prepare` to insert fake quantization\nlayers in the model.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "qmodel = quantizer.prepare(example_inputs=torch.randn(1, 1, 28, 28))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Fine-Tuning the Model\nThe next step is to fine tune the model with quantization applied.\nCall :py:meth:`~.quantization.LinearQuantizer.step` to step through the\nquantization milestones.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "optimizer = torch.optim.Adam(qmodel.parameters(), eps=1e-07)\naccuracy_quantized = 0.0\nnum_epochs = 4\n\nfor epoch in range(num_epochs):\n # train one epoch\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n quantizer.step()\n train_step(qmodel, optimizer, train_loader, data, target, batch_idx, epoch)\n\n # evaluate\n accuracy_quantized = eval_model(qmodel, test_loader)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The evaluation shows that you can train a quantized network without a significant loss\nin model accuracy. 
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Finalizing the Model for Export\n\nThe example shows that you can quantize the model with a few code changes to your\nexisting PyTorch training code. Now you can deploy this model on a device.\n\nTo finalize the model for export, call :py:meth:`~.quantization.LinearQuantizer.finalize`\non the quantizer. This folds quantization parameters such as scale and zero point\ninto the weights.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "qmodel.eval()\nquantized_model = quantizer.finalize()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exporting the Model for On-Device Execution\n\nTo deploy the model, convert it to a Core ML model.\n\nFollow the same steps in Core ML Tools for exporting a regular PyTorch model\n(for details, see [Converting from PyTorch](https://coremltools.readme.io/docs/pytorch-conversion)).\nThe parameter ``ct.target.iOS17`` is necessary here because activation quantization\nops are only supported on iOS versions >= 17.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import coremltools as ct\n\nexample_input = torch.rand(1, 1, 28, 28)\ntraced_model = torch.jit.trace(quantized_model, example_input)\n\ncoreml_model = ct.convert(\n traced_model,\n inputs=[ct.TensorType(shape=example_input.shape)],\n minimum_deployment_target=ct.target.iOS17,\n)\n\n# Expand \"~\" explicitly so the save path resolves correctly.\ncoreml_model.save(os.path.expanduser(\"~/.mnist_qat_data/quantized_model.mlpackage\"))" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/_build/html/_static/js/html5shiv-printshiv.min.js b/docs/_build/html/_static/js/html5shiv-printshiv.min.js deleted file mode 100644 index 2b43bd062..000000000 --- a/docs/_build/html/_static/js/html5shiv-printshiv.min.js +++ /dev/null @@ -1,4 +0,0 @@ -/** -* @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed -*/ -!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function 
g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof 
b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_build/html/_static/js/html5shiv.min.js b/docs/_build/html/_static/js/html5shiv.min.js deleted file mode 100644 index cd1c674f5..000000000 --- a/docs/_build/html/_static/js/html5shiv.min.js +++ /dev/null @@ -1,4 +0,0 @@ -/** -* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed -*/ -!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_build/html/objects.inv b/docs/_build/html/objects.inv deleted file mode 100644 index ce31ccc15..000000000 Binary files a/docs/_build/html/objects.inv and /dev/null differ diff --git a/docs/_build/html/searchindex.js b/docs/_build/html/searchindex.js deleted file 
mode 100644 index c832fdfe5..000000000 --- a/docs/_build/html/searchindex.js +++ /dev/null @@ -1 +0,0 @@ -Search.setIndex({"alltitles": {"API Contents": [[5, null]], "ClassifierConfig": [[12, "classifierconfig"]], "Compiled MLModel": [[17, "compiled-mlmodel"]], "Computation times": [[4, null], [6, null]], "Configuring Palettization": [[0, "configuring-palettization"]], "Converters": [[8, null]], "Core ML": [[20, "core-ml"]], "Defining the Network and Dataset": [[0, "defining-the-network-and-dataset"]], "EnumeratedShapes": [[12, "enumeratedshapes"]], "Examples": [[26, null]], "Exporting the Model for On-Device Execution": [[0, "exporting-the-model-for-on-device-execution"], [2, "exporting-the-model-for-on-device-execution"], [3, "exporting-the-model-for-on-device-execution"]], "Finalizing the Model for Export": [[2, "finalizing-the-model-for-export"], [3, "finalizing-the-model-for-export"]], "Fine-Tuning the Model": [[2, "fine-tuning-the-model"]], "Fine-Tuning the Palettized Model": [[0, "fine-tuning-the-palettized-model"]], "Fine-Tuning the Pruned Model": [[3, "fine-tuning-the-pruned-model"]], "GPTQ": [[29, "gptq"]], "ImageType": [[12, "imagetype"]], "InputType": [[12, "inputtype"]], "Insert Quantization Layers in the Model": [[2, "insert-quantization-layers-in-the-model"]], "Installing the Pruner in the Model": [[3, "installing-the-pruner-in-the-model"]], "LibSVM": [[10, null]], "Linear Quantization": [[2, null]], "MIL Builder": [[11, null]], "MIL Graph Passes": [[14, null]], "MIL Input Types": [[12, null]], "MIL Ops": [[13, null]], "MLModel": [[17, "module-coremltools.models.model"]], "Magnitude Pruning": [[3, null], [28, "magnitude-pruning"]], "Model APIs": [[17, null]], "Network and Dataset Definition": [[2, "network-and-dataset-definition"], [3, "network-and-dataset-definition"]], "Optimizers": [[20, null]], "Palettization": [[21, null], [27, null]], "Palettization Using Differentiable K-Means": [[0, null]], "Post-Training Compression": [[22, null]], "Previous Versions": [[7, null]], "Pruning": [[23, null], [28, null]], "Pruning scheduler": [[28, "pruning-scheduler"]], "PyTorch": [[20, "pytorch"]], "Quantization": [[24, null], [29, null]], "RangeDim": [[12, "rangedim"]], "Resources": [[5, null]], "Restoring LUT and Indices as Weights": [[0, "restoring-lut-and-indices-as-weights"]], "SKLearn": [[15, null]], "Shape": [[12, "shape"]], "SparseGPT": [[28, "sparsegpt"]], "StateType": [[12, "statetype"]], "TensorType": [[12, "tensortype"]], "Training the Model Without Palettization": [[0, "training-the-model-without-palettization"]], "Training the Model Without Pruning": [[3, "training-the-model-without-pruning"]], "Training the Model Without Quantization": [[2, "training-the-model-without-quantization"]], "Unified (TensorFlow and Pytorch)": [[9, null]], "Utilities": [[25, null]], "XGBoost": [[16, null]], "activation (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.activation"]], "activation (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.activation"]], "array_feature_extractor": [[17, "module-coremltools.models.array_feature_extractor"]], "classify": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.classify"]], "cleanup": [[14, "module-coremltools.converters.mil.mil.passes.defs.cleanup"]], "compression_utils": [[17, "compression-utils"]], "constexpr_ops (iOS 16+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops"]], "constexpr_ops (iOS 18+)": [[13, 
"module-coremltools.converters.mil.mil.ops.defs.iOS18.compression"]], "control_flow": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.control_flow"]], "conv (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.conv"]], "conv (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.conv"]], "coreml_update_state": [[13, "module-coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops"]], "coremltools API": [[5, null]], "elementwise_binary": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary"]], "elementwise_unary (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary"]], "elementwise_unary (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary"]], "extract_submodel": [[17, "module-coremltools.converters.mil.debugging_utils"]], "feature_vectorizer": [[17, "module-coremltools.models.feature_vectorizer"]], "image_resizing (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing"]], "image_resizing (iOS 16+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing"]], "image_resizing (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing"]], "linear (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.linear"]], "linear (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.linear"]], "nearest_neighbors": [[17, "module-coremltools.models.nearest_neighbors.builder"]], "neural_network": [[17, "neural-network"]], "neural_network.builder": [[19, null]], "neural_network.flexible_shape_utils": [[19, "module-coremltools.models.neural_network.flexible_shape_utils"]], "neural_network.quantization_utils": [[19, "module-coremltools.models.neural_network.quantization_utils"]], "neural_network.update_optimizer_utils": [[19, "module-coremltools.models.neural_network.update_optimizer_utils"]], "normalization (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.normalization"]], "normalization (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.normalization"]], "optimize_activation": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_activation"]], "optimize_conv": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_conv"]], "optimize_elementwise_binary": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary"]], "optimize_linear": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_linear"]], "optimize_normalization": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_normalization"]], "optimize_quantization": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_quantization"]], "optimize_repeat_ops": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops"]], "optimize_state": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_state"]], "optimize_tensor_operation": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation"]], "pipeline": [[17, "module-coremltools.models.pipeline"]], "pool": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.pool"]], "preprocess": [[14, "module-coremltools.converters.mil.mil.passes.defs.preprocess"]], "quantization": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops"], [14, "module-coremltools.converters.mil.mil.passes.defs.quantization"]], "random": [[13, 
"module-coremltools.converters.mil.mil.ops.defs.iOS15.random"]], "recurrent (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.recurrent"]], "recurrent (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.recurrent"]], "recurrent (iOS 18+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.recurrent"]], "reduction (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.reduction"]], "reduction (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.reduction"]], "scatter_gather (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather"]], "scatter_gather (iOS 16+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather"]], "scatter_gather (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather"]], "states (iOS 18+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.states"]], "symbol_transform": [[14, "module-coremltools.converters.mil.mil.passes.defs.symbol_transform"]], "tensor_operation (iOS 15+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation"]], "tensor_operation (iOS 16+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation"]], "tensor_operation (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation"]], "tensor_transformation (iOS 15)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation"]], "tensor_transformation (iOS 16+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation"]], "tensor_transformation (iOS 17+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation"]], "tensor_transformation (iOS 18+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation"]], "transformers (iOS 18+)": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.transformers"]], "tree_ensemble": [[17, "module-coremltools.models.tree_ensemble"]], "utils": [[17, "module-coremltools.models.utils"]]}, "docnames": ["_examples/dkm_palettization", "_examples/index", "_examples/linear_quantization", "_examples/magnitude_pruning", "_examples/sg_execution_times", "index", "sg_execution_times", "source/api-versions", "source/coremltools.converters", "source/coremltools.converters.convert", "source/coremltools.converters.libsvm", "source/coremltools.converters.mil", "source/coremltools.converters.mil.input_types", "source/coremltools.converters.mil.mil.ops.defs", "source/coremltools.converters.mil.mil.passes.defs", "source/coremltools.converters.sklearn", "source/coremltools.converters.xgboost", "source/coremltools.models", "source/coremltools.models.ml_program", "source/coremltools.models.neural_network", "source/coremltools.optimize", "source/coremltools.optimize.coreml.palettization", "source/coremltools.optimize.coreml.post_training_quantization", "source/coremltools.optimize.coreml.pruning", "source/coremltools.optimize.coreml.quantization", "source/coremltools.optimize.coreml.utilities", "source/coremltools.optimize.torch.examples", "source/coremltools.optimize.torch.palettization", "source/coremltools.optimize.torch.pruning", "source/coremltools.optimize.torch.quantization"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, 
"sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1}, "filenames": ["_examples/dkm_palettization.rst", "_examples/index.rst", "_examples/linear_quantization.rst", "_examples/magnitude_pruning.rst", "_examples/sg_execution_times.rst", "index.rst", "sg_execution_times.rst", "source/api-versions.rst", "source/coremltools.converters.rst", "source/coremltools.converters.convert.rst", "source/coremltools.converters.libsvm.rst", "source/coremltools.converters.mil.rst", "source/coremltools.converters.mil.input_types.rst", "source/coremltools.converters.mil.mil.ops.defs.rst", "source/coremltools.converters.mil.mil.passes.defs.rst", "source/coremltools.converters.sklearn.rst", "source/coremltools.converters.xgboost.rst", "source/coremltools.models.rst", "source/coremltools.models.ml_program.rst", "source/coremltools.models.neural_network.rst", "source/coremltools.optimize.rst", "source/coremltools.optimize.coreml.palettization.rst", "source/coremltools.optimize.coreml.post_training_quantization.rst", "source/coremltools.optimize.coreml.pruning.rst", "source/coremltools.optimize.coreml.quantization.rst", "source/coremltools.optimize.coreml.utilities.rst", "source/coremltools.optimize.torch.examples.rst", "source/coremltools.optimize.torch.palettization.rst", "source/coremltools.optimize.torch.pruning.rst", "source/coremltools.optimize.torch.quantization.rst"], "indexentries": {"__init__() (coremltools.converters.mil.input_types.classifierconfig method)": [[12, "coremltools.converters.mil.input_types.ClassifierConfig.__init__", false]], "__init__() (coremltools.converters.mil.input_types.enumeratedshapes method)": [[12, "coremltools.converters.mil.input_types.EnumeratedShapes.__init__", false]], "__init__() (coremltools.converters.mil.input_types.imagetype method)": [[12, "coremltools.converters.mil.input_types.ImageType.__init__", false]], "__init__() (coremltools.converters.mil.input_types.inputtype method)": [[12, "coremltools.converters.mil.input_types.InputType.__init__", false]], "__init__() (coremltools.converters.mil.input_types.rangedim method)": [[12, "coremltools.converters.mil.input_types.RangeDim.__init__", false]], "__init__() (coremltools.converters.mil.input_types.shape method)": [[12, "coremltools.converters.mil.input_types.Shape.__init__", false]], "__init__() (coremltools.converters.mil.input_types.statetype method)": [[12, "coremltools.converters.mil.input_types.StateType.__init__", false]], "__init__() (coremltools.converters.mil.input_types.tensortype method)": [[12, "coremltools.converters.mil.input_types.TensorType.__init__", false]], "__init__() (coremltools.models.compiledmlmodel method)": [[17, "coremltools.models.CompiledMLModel.__init__", false]], "__init__() (coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.__init__", false]], "__init__() (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.__init__", false]], "__init__() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.__init__", false]], "__init__() (coremltools.models.pipeline.pipeline method)": [[17, "coremltools.models.pipeline.Pipeline.__init__", false]], "__init__() (coremltools.models.pipeline.pipelineclassifier method)": [[17, "coremltools.models.pipeline.PipelineClassifier.__init__", false]], "__init__() 
(coremltools.models.pipeline.pipelineregressor method)": [[17, "coremltools.models.pipeline.PipelineRegressor.__init__", false]], "__init__() (coremltools.models.tree_ensemble.treeensemblebase method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase.__init__", false]], "__init__() (coremltools.models.tree_ensemble.treeensembleclassifier method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleClassifier.__init__", false]], "__init__() (coremltools.models.tree_ensemble.treeensembleregressor method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleRegressor.__init__", false]], "__init__() (coremltools.models.utils.multifunctiondescriptor method)": [[17, "coremltools.models.utils.MultiFunctionDescriptor.__init__", false]], "abs (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.abs", false]], "acos (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.acos", false]], "activate_int8_int8_matrix_multiplications() (in module coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.activate_int8_int8_matrix_multiplications", false]], "adamparams (class in coremltools.models.neural_network.update_optimizer_utils)": [[19, "coremltools.models.neural_network.update_optimizer_utils.AdamParams", false]], "add (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.add", false]], "add_acos() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_acos", false]], "add_acosh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_acosh", false]], "add_activation() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_activation", false]], "add_add_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_add_broadcastable", false]], "add_argmax() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_argmax", false]], "add_argmin() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_argmin", false]], "add_argsort() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_argsort", false]], "add_asin() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_asin", false]], "add_asinh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_asinh", false]], "add_atan() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_atan", false]], "add_atanh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": 
[[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_atanh", false]], "add_batched_mat_mul() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_batched_mat_mul", false]], "add_batchnorm() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_batchnorm", false]], "add_bias() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_bias", false]], "add_bidirlstm() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_bidirlstm", false]], "add_branch() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_branch", false]], "add_branch_node() (coremltools.models.tree_ensemble.treeensemblebase method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase.add_branch_node", false]], "add_broadcast_to_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_broadcast_to_dynamic", false]], "add_broadcast_to_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_broadcast_to_like", false]], "add_broadcast_to_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_broadcast_to_static", false]], "add_categorical_distribution() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_categorical_distribution", false]], "add_ceil() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_ceil", false]], "add_clamped_relu() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_clamped_relu", false]], "add_clip() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_clip", false]], "add_concat_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_concat_nd", false]], "add_constant_pad() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_constant_pad", false]], "add_conv_transpose_output_shape (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.add_conv_transpose_output_shape", false]], "add_convolution() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_convolution", false]], "add_convolution3d() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_convolution3d", false]], "add_copy() 
(coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_copy", false]], "add_cos() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_cos", false]], "add_cosh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_cosh", false]], "add_crop() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_crop", false]], "add_crop_resize() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_crop_resize", false]], "add_cumsum() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_cumsum", false]], "add_custom() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_custom", false]], "add_divide_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_divide_broadcastable", false]], "add_elementwise() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_elementwise", false]], "add_embedding() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_embedding", false]], "add_embedding_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_embedding_nd", false]], "add_enumerated_image_sizes() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.add_enumerated_image_sizes", false]], "add_enumerated_multiarray_shapes() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.add_enumerated_multiarray_shapes", false]], "add_equal() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_equal", false]], "add_erf() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_erf", false]], "add_exp2() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_exp2", false]], "add_expand_dims() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_expand_dims", false]], "add_fill_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_fill_dynamic", false]], "add_fill_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_fill_like", false]], "add_fill_static() 
(coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_fill_static", false]], "add_flatten() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_flatten", false]], "add_flatten_to_2d() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_flatten_to_2d", false]], "add_floor() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_floor", false]], "add_floor_div_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_floor_div_broadcastable", false]], "add_fp16_cast (class in coremltools.converters.mil.mil.passes.defs.quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.quantization.add_fp16_cast", false]], "add_function() (coremltools.models.utils.multifunctiondescriptor method)": [[17, "coremltools.models.utils.MultiFunctionDescriptor.add_function", false]], "add_gather() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_gather", false]], "add_gather_along_axis() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_gather_along_axis", false]], "add_gather_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_gather_nd", false]], "add_gelu() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_gelu", false]], "add_get_shape() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_get_shape", false]], "add_global_pooling3d() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_global_pooling3d", false]], "add_greater_than() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_greater_than", false]], "add_gru() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_gru", false]], "add_inner_product() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_inner_product", false]], "add_l2_normalize() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_l2_normalize", false]], "add_layer_normalization() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_layer_normalization", false]], "add_leaf_node() (coremltools.models.tree_ensemble.treeensemblebase method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase.add_leaf_node", false]], "add_less_than() 
(coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_less_than", false]], "add_load_constant() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_load_constant", false]], "add_load_constant_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_load_constant_nd", false]], "add_logical() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_logical", false]], "add_loop() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_loop", false]], "add_loop_break() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_loop_break", false]], "add_loop_continue() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_loop_continue", false]], "add_lower_triangular() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_lower_triangular", false]], "add_lrn() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_lrn", false]], "add_matrix_band_part() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_matrix_band_part", false]], "add_max_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_max_broadcastable", false]], "add_min_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_min_broadcastable", false]], "add_mod_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_mod_broadcastable", false]], "add_model() (coremltools.models.pipeline.pipeline method)": [[17, "coremltools.models.pipeline.Pipeline.add_model", false]], "add_model() (coremltools.models.pipeline.pipelineclassifier method)": [[17, "coremltools.models.pipeline.PipelineClassifier.add_model", false]], "add_model() (coremltools.models.pipeline.pipelineregressor method)": [[17, "coremltools.models.pipeline.PipelineRegressor.add_model", false]], "add_model() (coremltools.models.utils.multifunctiondescriptor method)": [[17, "coremltools.models.utils.MultiFunctionDescriptor.add_model", false]], "add_multiarray_ndshape_enumeration() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.add_multiarray_ndshape_enumeration", false]], "add_multiply_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_multiply_broadcastable", false]], "add_mvn() (coremltools.models.neural_network.builder.neuralnetworkbuilder 
method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_mvn", false]], "add_nms() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_nms", false]], "add_not_equal() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_not_equal", false]], "add_one_hot() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_one_hot", false]], "add_optionals() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_optionals", false]], "add_padding() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_padding", false]], "add_permute() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_permute", false]], "add_pooling() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_pooling", false]], "add_pooling3d() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_pooling3d", false]], "add_pow_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_pow_broadcastable", false]], "add_random_bernoulli_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_bernoulli_dynamic", false]], "add_random_bernoulli_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_bernoulli_like", false]], "add_random_bernoulli_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_bernoulli_static", false]], "add_random_normal_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_normal_dynamic", false]], "add_random_normal_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_normal_like", false]], "add_random_normal_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_normal_static", false]], "add_random_uniform_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_uniform_dynamic", false]], "add_random_uniform_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_uniform_like", false]], "add_random_uniform_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, 
"coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_random_uniform_static", false]], "add_range_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_range_dynamic", false]], "add_range_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_range_static", false]], "add_rank_preserving_reshape() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_rank_preserving_reshape", false]], "add_reduce() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce", false]], "add_reduce_l1() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_l1", false]], "add_reduce_l2() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_l2", false]], "add_reduce_logsum() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_logsum", false]], "add_reduce_logsumexp() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_logsumexp", false]], "add_reduce_max() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_max", false]], "add_reduce_mean() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_mean", false]], "add_reduce_min() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_min", false]], "add_reduce_prod() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_prod", false]], "add_reduce_sum() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_sum", false]], "add_reduce_sumsquare() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reduce_sumsquare", false]], "add_reorganize_data() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reorganize_data", false]], "add_reshape() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reshape", false]], "add_reshape_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reshape_dynamic", false]], "add_reshape_like() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reshape_like", 
false]], "add_reshape_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reshape_static", false]], "add_resize_bilinear() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_resize_bilinear", false]], "add_reverse() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reverse", false]], "add_reverse_sequence() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_reverse_sequence", false]], "add_round() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_round", false]], "add_samples() (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.add_samples", false]], "add_scale() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_scale", false]], "add_scatter() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_scatter", false]], "add_scatter_along_axis() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_scatter_along_axis", false]], "add_scatter_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_scatter_nd", false]], "add_sequence_repeat() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_sequence_repeat", false]], "add_sign() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_sign", false]], "add_simple_rnn() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_simple_rnn", false]], "add_sin() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_sin", false]], "add_sinh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_sinh", false]], "add_slice() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_slice", false]], "add_slice_by_size() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_slice_by_size", false]], "add_slice_dynamic() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_slice_dynamic", false]], "add_slice_static() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, 
"coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_slice_static", false]], "add_sliding_windows() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_sliding_windows", false]], "add_softmax() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_softmax", false]], "add_softmax_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_softmax_nd", false]], "add_split() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_split", false]], "add_split_nd() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_split_nd", false]], "add_squeeze() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_squeeze", false]], "add_stack() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_stack", false]], "add_subtract_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_subtract_broadcastable", false]], "add_tan() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_tan", false]], "add_tanh() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_tanh", false]], "add_tile() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_tile", false]], "add_topk() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_topk", false]], "add_transpose() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_transpose", false]], "add_unary() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_unary", false]], "add_unilstm() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_unilstm", false]], "add_upper_triangular() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_upper_triangular", false]], "add_upsample() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_upsample", false]], "add_where_broadcastable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_where_broadcastable", false]], "add_where_nonzero() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, 
"coremltools.models.neural_network.builder.NeuralNetworkBuilder.add_where_nonzero", false]], "advancedquantizedlayerselector (class in coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.AdvancedQuantizedLayerSelector", false]], "affine (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.affine", false]], "affine_quantize_weights() (in module coremltools.models.ml_program.compression_utils)": [[18, "coremltools.models.ml_program.compression_utils.affine_quantize_weights", false]], "argsort (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.argsort", false]], "as_dict() (coremltools.optimize.torch.layerwise_compression.layerwisecompressorconfig method)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.as_dict", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.palettization.dkmpalettizerconfig method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.palettization.moduledkmpalettizerconfig method)": [[27, "coremltools.optimize.torch.palettization.ModuleDKMPalettizerConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.pruning.magnitudeprunerconfig method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.pruning.modulemagnitudeprunerconfig method)": [[28, "coremltools.optimize.torch.pruning.ModuleMagnitudePrunerConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.quantization.linearquantizerconfig method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.as_dict", false]], "as_dict() (coremltools.optimize.torch.quantization.modulelinearquantizerconfig method)": [[29, "coremltools.optimize.torch.quantization.ModuleLinearQuantizerConfig.as_dict", false]], "asin (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.asin", false]], "atan (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.atan", false]], "atanh (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.atanh", false]], "author (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.author", false]], "avg_pool (class in coremltools.converters.mil.mil.ops.defs.ios15.pool)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.pool.avg_pool", false]], "band_part (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.band_part", false]], "batch (class in coremltools.models.neural_network.update_optimizer_utils)": [[19, "coremltools.models.neural_network.update_optimizer_utils.Batch", false]], "batch_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.normalization.batch_norm", false]], "batch_norm (class 
in coremltools.converters.mil.mil.ops.defs.ios17.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.normalization.batch_norm", false]], "batch_to_space (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.batch_to_space", false]], "bisect_model() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.bisect_model", false]], "builder (class in coremltools.converters.mil.mil)": [[11, "coremltools.converters.mil.mil.Builder", false]], "canonicalize_inplace_pattern (class in coremltools.converters.mil.mil.passes.defs.optimize_state)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_state.canonicalize_inplace_pattern", false]], "cast (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.cast", false]], "cast (class in coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary.cast", false]], "cast_optimization (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.cast_optimization", false]], "ceil (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.ceil", false]], "clamped_relu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.clamped_relu", false]], "clamped_relu (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.clamped_relu", false]], "classifierconfig (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.ClassifierConfig", false]], "classify (class in coremltools.converters.mil.mil.ops.defs.ios15.classify)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.classify.classify", false]], "clip (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.clip", false]], "clip (class in coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary.clip", false]], "compare_models() (in module coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.compare_models", false]], "compile_model() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.compile_model", false]], "compiledmlmodel (class in coremltools.models)": [[17, "coremltools.models.CompiledMLModel", false]], "compose_conv1d (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.compose_conv1d", false]], "compress() (coremltools.optimize.torch.layerwise_compression.layerwisecompressor method)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressor.compress", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressor.compress", false]], "compute_sparsity() (coremltools.optimize.torch.pruning.pruning_scheduler.constantsparsityscheduler method)": [[28, "coremltools.optimize.torch.pruning.pruning_scheduler.ConstantSparsityScheduler.compute_sparsity", false]], 
"compute_sparsity() (coremltools.optimize.torch.pruning.pruning_scheduler.polynomialdecayscheduler method)": [[28, "coremltools.optimize.torch.pruning.pruning_scheduler.PolynomialDecayScheduler.compute_sparsity", false]], "concat (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.concat", false]], "concat_to_pixel_shuffle (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.concat_to_pixel_shuffle", false]], "cond (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.cond", false]], "const (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.Const", false]], "const_deduplication (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.const_deduplication", false]], "const_elimination (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.const_elimination", false]], "constantsparsityscheduler (class in coremltools.optimize.torch.pruning.pruning_scheduler)": [[28, "coremltools.optimize.torch.pruning.pruning_scheduler.ConstantSparsityScheduler", false]], "constexpr_affine_dequantize (class in coremltools.converters.mil.mil.ops.defs.ios16.constexpr_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops.constexpr_affine_dequantize", false]], "constexpr_blockwise_shift_scale (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_blockwise_shift_scale", false]], "constexpr_cast (class in coremltools.converters.mil.mil.ops.defs.ios16.constexpr_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops.constexpr_cast", false]], "constexpr_cast (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_cast", false]], "constexpr_lut_to_dense (class in coremltools.converters.mil.mil.ops.defs.ios16.constexpr_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops.constexpr_lut_to_dense", false]], "constexpr_lut_to_dense (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_lut_to_dense", false]], "constexpr_lut_to_sparse (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_lut_to_sparse", false]], "constexpr_sparse_blockwise_shift_scale (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_sparse_blockwise_shift_scale", false]], "constexpr_sparse_to_dense (class in coremltools.converters.mil.mil.ops.defs.ios16.constexpr_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops.constexpr_sparse_to_dense", false]], "constexpr_sparse_to_dense (class in coremltools.converters.mil.mil.ops.defs.ios18.compression)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.compression.constexpr_sparse_to_dense", false]], "conv (class in coremltools.converters.mil.mil.ops.defs.ios15.conv)": [[13, 
"coremltools.converters.mil.mil.ops.defs.iOS15.conv.conv", false]], "conv (class in coremltools.converters.mil.mil.ops.defs.ios17.conv)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.conv.conv", false]], "conv_transpose (class in coremltools.converters.mil.mil.ops.defs.ios15.conv)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.conv.conv_transpose", false]], "conv_transpose (class in coremltools.converters.mil.mil.ops.defs.ios17.conv)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.conv.conv_transpose", false]], "convert() (in module coremltools.converters._converters_entry)": [[9, "coremltools.converters._converters_entry.convert", false]], "convert() (in module coremltools.converters.libsvm._libsvm_converter)": [[10, "coremltools.converters.libsvm._libsvm_converter.convert", false]], "convert() (in module coremltools.converters.sklearn._converter)": [[15, "coremltools.converters.sklearn._converter.convert", false]], "convert() (in module coremltools.converters.xgboost._tree)": [[16, "coremltools.converters.xgboost._tree.convert", false]], "convert_double_to_float_multiarray_type() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.convert_double_to_float_multiarray_type", false]], "coreml_update_state (class in coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops)": [[13, "coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops.coreml_update_state", false]], "coremlopmetadata (class in coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.CoreMLOpMetaData", false], [25, "coremltools.optimize.coreml.CoreMLOpMetaData", false]], "coremltools.converters._converters_entry": [[9, "module-coremltools.converters._converters_entry", false]], "coremltools.converters.libsvm._libsvm_converter": [[10, "module-coremltools.converters.libsvm._libsvm_converter", false]], "coremltools.converters.mil.debugging_utils": [[17, "module-coremltools.converters.mil.debugging_utils", false]], "coremltools.converters.mil.input_types": [[12, "module-coremltools.converters.mil.input_types", false]], "coremltools.converters.mil.mil": [[11, "module-coremltools.converters.mil.mil", false]], "coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops": [[13, "module-coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops", false]], "coremltools.converters.mil.mil.ops.defs.ios15.activation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.activation", false]], "coremltools.converters.mil.mil.ops.defs.ios15.classify": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.classify", false]], "coremltools.converters.mil.mil.ops.defs.ios15.control_flow": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.control_flow", false]], "coremltools.converters.mil.mil.ops.defs.ios15.conv": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.conv", false]], "coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary", false]], "coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary", false]], "coremltools.converters.mil.mil.ops.defs.ios15.image_resizing": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing", false]], "coremltools.converters.mil.mil.ops.defs.ios15.linear": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.linear", false]], "coremltools.converters.mil.mil.ops.defs.ios15.normalization": [[13, 
"module-coremltools.converters.mil.mil.ops.defs.iOS15.normalization", false]], "coremltools.converters.mil.mil.ops.defs.ios15.pool": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.pool", false]], "coremltools.converters.mil.mil.ops.defs.ios15.random": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.random", false]], "coremltools.converters.mil.mil.ops.defs.ios15.recurrent": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.recurrent", false]], "coremltools.converters.mil.mil.ops.defs.ios15.reduction": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.reduction", false]], "coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather", false]], "coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation", false]], "coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation", false]], "coremltools.converters.mil.mil.ops.defs.ios16.constexpr_ops": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops", false]], "coremltools.converters.mil.mil.ops.defs.ios16.image_resizing": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing", false]], "coremltools.converters.mil.mil.ops.defs.ios16.scatter_gather": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather", false]], "coremltools.converters.mil.mil.ops.defs.ios16.tensor_operation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation", false]], "coremltools.converters.mil.mil.ops.defs.ios16.tensor_transformation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation", false]], "coremltools.converters.mil.mil.ops.defs.ios17.activation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.activation", false]], "coremltools.converters.mil.mil.ops.defs.ios17.conv": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.conv", false]], "coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary", false]], "coremltools.converters.mil.mil.ops.defs.ios17.image_resizing": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing", false]], "coremltools.converters.mil.mil.ops.defs.ios17.linear": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.linear", false]], "coremltools.converters.mil.mil.ops.defs.ios17.normalization": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.normalization", false]], "coremltools.converters.mil.mil.ops.defs.ios17.quantization_ops": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops", false]], "coremltools.converters.mil.mil.ops.defs.ios17.recurrent": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.recurrent", false]], "coremltools.converters.mil.mil.ops.defs.ios17.reduction": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.reduction", false]], "coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather", false]], "coremltools.converters.mil.mil.ops.defs.ios17.tensor_operation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation", false]], "coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation": [[13, 
"module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation", false]], "coremltools.converters.mil.mil.ops.defs.ios18.compression": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.compression", false]], "coremltools.converters.mil.mil.ops.defs.ios18.recurrent": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.recurrent", false]], "coremltools.converters.mil.mil.ops.defs.ios18.states": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.states", false]], "coremltools.converters.mil.mil.ops.defs.ios18.tensor_transformation": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation", false]], "coremltools.converters.mil.mil.ops.defs.ios18.transformers": [[13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.transformers", false]], "coremltools.converters.mil.mil.passes.defs.cleanup": [[14, "module-coremltools.converters.mil.mil.passes.defs.cleanup", false]], "coremltools.converters.mil.mil.passes.defs.optimize_activation": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_activation", false]], "coremltools.converters.mil.mil.passes.defs.optimize_conv": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_conv", false]], "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary", false]], "coremltools.converters.mil.mil.passes.defs.optimize_linear": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_linear", false]], "coremltools.converters.mil.mil.passes.defs.optimize_normalization": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_normalization", false]], "coremltools.converters.mil.mil.passes.defs.optimize_quantization": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_quantization", false]], "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops", false]], "coremltools.converters.mil.mil.passes.defs.optimize_state": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_state", false]], "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation": [[14, "module-coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation", false]], "coremltools.converters.mil.mil.passes.defs.preprocess": [[14, "module-coremltools.converters.mil.mil.passes.defs.preprocess", false]], "coremltools.converters.mil.mil.passes.defs.quantization": [[14, "module-coremltools.converters.mil.mil.passes.defs.quantization", false]], "coremltools.converters.mil.mil.passes.defs.symbol_transform": [[14, "module-coremltools.converters.mil.mil.passes.defs.symbol_transform", false]], "coremltools.converters.sklearn._converter": [[15, "module-coremltools.converters.sklearn._converter", false]], "coremltools.converters.xgboost._tree": [[16, "module-coremltools.converters.xgboost._tree", false]], "coremltools.models.array_feature_extractor": [[17, "module-coremltools.models.array_feature_extractor", false]], "coremltools.models.feature_vectorizer": [[17, "module-coremltools.models.feature_vectorizer", false]], "coremltools.models.ml_program.compression_utils": [[18, "module-coremltools.models.ml_program.compression_utils", false]], "coremltools.models.model": [[17, "module-coremltools.models.model", false]], "coremltools.models.nearest_neighbors.builder": [[17, "module-coremltools.models.nearest_neighbors.builder", false]], 
"coremltools.models.neural_network.builder": [[19, "module-coremltools.models.neural_network.builder", false]], "coremltools.models.neural_network.flexible_shape_utils": [[19, "module-coremltools.models.neural_network.flexible_shape_utils", false]], "coremltools.models.neural_network.quantization_utils": [[19, "module-coremltools.models.neural_network.quantization_utils", false]], "coremltools.models.neural_network.update_optimizer_utils": [[19, "module-coremltools.models.neural_network.update_optimizer_utils", false]], "coremltools.models.pipeline": [[17, "module-coremltools.models.pipeline", false]], "coremltools.models.tree_ensemble": [[17, "module-coremltools.models.tree_ensemble", false]], "coremltools.models.utils": [[17, "module-coremltools.models.utils", false]], "coremltools.optimize.coreml": [[21, "module-coremltools.optimize.coreml", false], [22, "module-coremltools.optimize.coreml", false], [23, "module-coremltools.optimize.coreml", false], [24, "module-coremltools.optimize.coreml", false], [25, "module-coremltools.optimize.coreml", false]], "coremlweightmetadata (class in coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.CoreMLWeightMetaData", false], [25, "coremltools.optimize.coreml.CoreMLWeightMetaData", false]], "cos (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.cos", false]], "cosh (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.cosh", false]], "create_array_feature_extractor() (in module coremltools.models.array_feature_extractor)": [[17, "coremltools.models.array_feature_extractor.create_array_feature_extractor", false]], "create_feature_vectorizer() (in module coremltools.models.feature_vectorizer)": [[17, "coremltools.models.feature_vectorizer.create_feature_vectorizer", false]], "crop (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.crop", false]], "crop_resize (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.crop_resize", false]], "crop_resize (class in coremltools.converters.mil.mil.ops.defs.ios16.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing.crop_resize", false]], "crop_resize (class in coremltools.converters.mil.mil.ops.defs.ios17.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing.crop_resize", false]], "cumsum (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.cumsum", false]], "dead_code_elimination (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.dead_code_elimination", false]], "decompress_weights() (in module coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.decompress_weights", false], [25, "coremltools.optimize.coreml.decompress_weights", false]], "dedup_op_and_var_names (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.dedup_op_and_var_names", false]], "depth_to_space (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, 
"coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.depth_to_space", false]], "dequantize (class in coremltools.converters.mil.mil.ops.defs.ios17.quantization_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops.dequantize", false]], "dequantize_quantize_pair_elimination (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.dequantize_quantize_pair_elimination", false]], "dequantize_to_constexpr (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.dequantize_to_constexpr", false]], "description (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.description", false]], "detect_concat_interleave (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.detect_concat_interleave", false]], "distributive_quantized_binary_op_scale_normalization (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.distributive_quantized_binary_op_scale_normalization", false]], "divide_to_multiply (class in coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary.divide_to_multiply", false]], "dkmpalettizer (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizer", false]], "dkmpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig", false]], "do_quantize() (coremltools.models.neural_network.quantization_utils.advancedquantizedlayerselector method)": [[19, "coremltools.models.neural_network.quantization_utils.AdvancedQuantizedLayerSelector.do_quantize", false]], "do_quantize() (coremltools.models.neural_network.quantization_utils.matrixmultiplylayerselector method)": [[19, "coremltools.models.neural_network.quantization_utils.MatrixMultiplyLayerSelector.do_quantize", false]], "einsum (class in coremltools.converters.mil.mil.ops.defs.ios15.linear)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.linear.einsum", false]], "elu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.elu", false]], "elu (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.elu", false]], "enumeratedshapes (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.EnumeratedShapes", false]], "equal (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.equal", false]], "erf (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.erf", false]], "evaluate_classifier() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.evaluate_classifier", false]], "evaluate_classifier_with_probabilities() (in module coremltools.models.utils)": [[17, 
"coremltools.models.utils.evaluate_classifier_with_probabilities", false]], "evaluate_regressor() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.evaluate_regressor", false]], "evaluate_transformer() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.evaluate_transformer", false]], "exp (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.exp", false]], "exp2 (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.exp2", false]], "expand_dims (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.expand_dims", false]], "expand_dims (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.expand_dims", false]], "expand_dynamic_linear (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.expand_dynamic_linear", false]], "expand_high_rank_reshape_and_transpose (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.expand_high_rank_reshape_and_transpose", false]], "extract_submodel() (in module coremltools.converters.mil.debugging_utils)": [[17, "coremltools.converters.mil.debugging_utils.extract_submodel", false]], "fill (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.fill", false]], "fill_like (class in coremltools.converters.mil.mil.ops.defs.ios16.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation.fill_like", false]], "finalize() (coremltools.optimize.torch.palettization.dkmpalettizer method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizer.finalize", false]], "finalize() (coremltools.optimize.torch.pruning.magnitudepruner method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePruner.finalize", false]], "finalize() (coremltools.optimize.torch.quantization.linearquantizer method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizer.finalize", false]], "flatten2d (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.flatten2d", false]], "floor (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.floor", false]], "floor_div (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.floor_div", false]], "from_dict() (coremltools.optimize.coreml.optimizationconfig class method)": [[25, "coremltools.optimize.coreml.OptimizationConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.layerwise_compression.layerwisecompressorconfig class method)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.from_dict", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.palettization.dkmpalettizerconfig class method)": [[27, 
"coremltools.optimize.torch.palettization.DKMPalettizerConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.palettization.moduledkmpalettizerconfig class method)": [[27, "coremltools.optimize.torch.palettization.ModuleDKMPalettizerConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.pruning.magnitudeprunerconfig class method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.pruning.modulemagnitudeprunerconfig class method)": [[28, "coremltools.optimize.torch.pruning.ModuleMagnitudePrunerConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.quantization.linearquantizerconfig class method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.from_dict", false]], "from_dict() (coremltools.optimize.torch.quantization.modulelinearquantizerconfig class method)": [[29, "coremltools.optimize.torch.quantization.ModuleLinearQuantizerConfig.from_dict", false]], "from_yaml() (coremltools.optimize.coreml.optimizationconfig class method)": [[25, "coremltools.optimize.coreml.OptimizationConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.layerwise_compression.layerwisecompressorconfig class method)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.from_yaml", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.palettization.dkmpalettizerconfig class method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.palettization.moduledkmpalettizerconfig class method)": [[27, "coremltools.optimize.torch.palettization.ModuleDKMPalettizerConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.pruning.magnitudeprunerconfig class method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.pruning.modulemagnitudeprunerconfig class method)": [[28, "coremltools.optimize.torch.pruning.ModuleMagnitudePrunerConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.quantization.linearquantizerconfig class method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.from_yaml", false]], "from_yaml() (coremltools.optimize.torch.quantization.modulelinearquantizerconfig class method)": [[29, "coremltools.optimize.torch.quantization.ModuleLinearQuantizerConfig.from_yaml", false]], "fuse_conv_batchnorm (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.fuse_conv_batchnorm", false]], "fuse_conv_bias (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.fuse_conv_bias", false]], "fuse_conv_scale (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.fuse_conv_scale", false]], "fuse_elementwise_to_batchnorm (class in coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary.fuse_elementwise_to_batchnorm", false]], "fuse_gelu_exact (class in coremltools.converters.mil.mil.passes.defs.optimize_activation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_activation.fuse_gelu_exact", false]], 
"fuse_gelu_tanh_approximation (class in coremltools.converters.mil.mil.passes.defs.optimize_activation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_activation.fuse_gelu_tanh_approximation", false]], "fuse_layernorm_or_instancenorm (class in coremltools.converters.mil.mil.passes.defs.optimize_normalization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_normalization.fuse_layernorm_or_instancenorm", false]], "fuse_leaky_relu (class in coremltools.converters.mil.mil.passes.defs.optimize_activation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_activation.fuse_leaky_relu", false]], "fuse_linear_bias (class in coremltools.converters.mil.mil.passes.defs.optimize_linear)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_linear.fuse_linear_bias", false]], "fuse_matmul_weight_bias (class in coremltools.converters.mil.mil.passes.defs.optimize_linear)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_linear.fuse_matmul_weight_bias", false]], "fuse_onehot_matmul_to_gather (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.fuse_onehot_matmul_to_gather", false]], "fuse_pad_conv (class in coremltools.converters.mil.mil.passes.defs.optimize_conv)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_conv.fuse_pad_conv", false]], "fuse_prelu (class in coremltools.converters.mil.mil.passes.defs.optimize_activation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_activation.fuse_prelu", false]], "fuse_reduce_mean (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.fuse_reduce_mean", false]], "fuse_transpose_matmul (class in coremltools.converters.mil.mil.passes.defs.optimize_linear)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_linear.fuse_transpose_matmul", false]], "gather (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.gather", false]], "gather (class in coremltools.converters.mil.mil.ops.defs.ios16.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather.gather", false]], "gather (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.gather", false]], "gather_along_axis (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.gather_along_axis", false]], "gather_along_axis (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.gather_along_axis", false]], "gather_nd (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.gather_nd", false]], "gather_nd (class in coremltools.converters.mil.mil.ops.defs.ios16.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather.gather_nd", false]], "gather_nd (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.gather_nd", false]], "gelu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.gelu", false]], "get_compiled_model_path() 
(coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.get_compiled_model_path", false]], "get_spec() (coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.get_spec", false]], "get_weights_metadata() (in module coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.get_weights_metadata", false], [25, "coremltools.optimize.coreml.get_weights_metadata", false]], "gptq (class in coremltools.optimize.torch.layerwise_compression.algorithms)": [[29, "coremltools.optimize.torch.layerwise_compression.algorithms.GPTQ", false]], "greater (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.greater", false]], "greater_equal (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.greater_equal", false]], "gru (class in coremltools.converters.mil.mil.ops.defs.ios15.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.recurrent.gru", false]], "gru (class in coremltools.converters.mil.mil.ops.defs.ios17.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.recurrent.gru", false]], "gru (class in coremltools.converters.mil.mil.ops.defs.ios18.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.recurrent.gru", false]], "identity (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.identity", false]], "image_input_preprocess (class in coremltools.converters.mil.mil.passes.defs.preprocess)": [[14, "coremltools.converters.mil.mil.passes.defs.preprocess.image_input_preprocess", false]], "imagetype (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.ImageType", false]], "index_type (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.index_type", false]], "inputtype (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.InputType", false]], "inspect_conv_channels() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_conv_channels", false]], "inspect_innerproduct_channels() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_innerproduct_channels", false]], "inspect_input_features() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_input_features", false]], "inspect_layers() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_layers", false]], "inspect_loss_layers() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_loss_layers", false]], "inspect_optimizer() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_optimizer", false]], "inspect_output_features() (coremltools.models.neural_network.builder.neuralnetworkbuilder 
method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_output_features", false]], "inspect_updatable_layers() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.inspect_updatable_layers", false]], "instance_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.normalization.instance_norm", false]], "instance_norm (class in coremltools.converters.mil.mil.ops.defs.ios17.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.normalization.instance_norm", false]], "int_op_canonicalization (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.int_op_canonicalization", false]], "inverse (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.inverse", false]], "inverse (class in coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary.inverse", false]], "is_updatable (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.is_updatable", false]], "isflexible() (coremltools.models.neural_network.flexible_shape_utils.neuralnetworkmultiarrayshaperange method)": [[19, "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkMultiArrayShapeRange.isFlexible", false]], "knearestneighborsclassifierbuilder (class in coremltools.models.nearest_neighbors.builder)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder", false]], "l2_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.normalization.l2_norm", false]], "l2_norm (class in coremltools.converters.mil.mil.ops.defs.ios17.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.normalization.l2_norm", false]], "l2_pool (class in coremltools.converters.mil.mil.ops.defs.ios15.pool)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.pool.l2_pool", false]], "layer_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.normalization.layer_norm", false]], "layer_norm (class in coremltools.converters.mil.mil.ops.defs.ios17.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.normalization.layer_norm", false]], "layerwisecompressor (class in coremltools.optimize.torch.layerwise_compression)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressor", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressor", false]], "layerwisecompressorconfig (class in coremltools.optimize.torch.layerwise_compression)": [[28, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig", false], [29, "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig", false]], "leaf_size (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.leaf_size", false]], "leaky_relu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, 
"coremltools.converters.mil.mil.ops.defs.iOS15.activation.leaky_relu", false]], "leaky_relu (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.leaky_relu", false]], "less (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.less", false]], "less_equal (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.less_equal", false]], "license (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.license", false]], "linear (class in coremltools.converters.mil.mil.ops.defs.ios15.linear)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.linear.linear", false]], "linear (class in coremltools.converters.mil.mil.ops.defs.ios17.linear)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.linear.linear", false]], "linear_activation (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.linear_activation", false]], "linear_activation (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.linear_activation", false]], "linear_quantize_activations() (in module coremltools.optimize.coreml.experimental)": [[24, "coremltools.optimize.coreml.experimental.linear_quantize_activations", false]], "linear_quantize_weights() (in module coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.linear_quantize_weights", false], [24, "coremltools.optimize.coreml.linear_quantize_weights", false]], "linearquantizer (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizer", false]], "linearquantizerconfig (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig", false]], "list_gather (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.list_gather", false]], "list_length (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.list_length", false]], "list_read (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.list_read", false]], "list_scatter (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.list_scatter", false]], "list_write (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.list_write", false]], "load_spec() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.load_spec", false]], "local_response_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.normalization.local_response_norm", false]], "local_response_norm (class in coremltools.converters.mil.mil.ops.defs.ios17.normalization)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.normalization.local_response_norm", false]], "log (class in 
coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.log", false]], "log (class in coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary.log", false]], "logical_and (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.logical_and", false]], "logical_not (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.logical_not", false]], "logical_or (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.logical_or", false]], "logical_xor (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.logical_xor", false]], "loop_invariant_elimination (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.loop_invariant_elimination", false]], "lstm (class in coremltools.converters.mil.mil.ops.defs.ios15.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.recurrent.lstm", false]], "lstm (class in coremltools.converters.mil.mil.ops.defs.ios17.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.recurrent.lstm", false]], "magnitudepruner (class in coremltools.optimize.torch.pruning)": [[28, "coremltools.optimize.torch.pruning.MagnitudePruner", false]], "magnitudeprunerconfig (class in coremltools.optimize.torch.pruning)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig", false]], "make_list (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.make_list", false]], "make_pipeline() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.make_pipeline", false]], "make_state() (coremltools.models.compiledmlmodel method)": [[17, "coremltools.models.CompiledMLModel.make_state", false]], "make_state() (coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.make_state", false]], "make_updatable() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.make_updatable", false]], "materialize_dynamic_shape_mlmodel() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.materialize_dynamic_shape_mlmodel", false]], "materialize_symbolic_shape_program (class in coremltools.converters.mil.mil.passes.defs.symbol_transform)": [[14, "coremltools.converters.mil.mil.passes.defs.symbol_transform.materialize_symbolic_shape_program", false]], "matmul (class in coremltools.converters.mil.mil.ops.defs.ios15.linear)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.linear.matmul", false]], "matmul (class in coremltools.converters.mil.mil.ops.defs.ios17.linear)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.linear.matmul", false]], "matrixmultiplylayerselector (class in coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.MatrixMultiplyLayerSelector", false]], "max_pool (class in coremltools.converters.mil.mil.ops.defs.ios15.pool)": [[13, 
"coremltools.converters.mil.mil.ops.defs.iOS15.pool.max_pool", false]], "maximum (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.maximum", false]], "merge_affine_dequantize_with_consecutive_ops (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.merge_affine_dequantize_with_consecutive_ops", false]], "merge_consecutive_paddings (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.merge_consecutive_paddings", false]], "merge_consecutive_relus (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.merge_consecutive_relus", false]], "merge_consecutive_reshapes (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.merge_consecutive_reshapes", false]], "merge_consecutive_transposes (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.merge_consecutive_transposes", false]], "minimum (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.minimum", false]], "mlmodel (class in coremltools.models.model)": [[17, "coremltools.models.model.MLModel", false]], "mod (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.mod", false]], "modelmetrics (class in coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.ModelMetrics", false]], "module": [[9, "module-coremltools.converters._converters_entry", false], [10, "module-coremltools.converters.libsvm._libsvm_converter", false], [11, "module-coremltools.converters.mil.mil", false], [12, "module-coremltools.converters.mil.input_types", false], [13, "module-coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.activation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.classify", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.control_flow", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.conv", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.linear", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.normalization", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.pool", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.random", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.recurrent", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.reduction", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation", false], [13, 
"module-coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.activation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.conv", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.linear", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.normalization", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.recurrent", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.reduction", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.compression", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.recurrent", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.states", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation", false], [13, "module-coremltools.converters.mil.mil.ops.defs.iOS18.transformers", false], [14, "module-coremltools.converters.mil.mil.passes.defs.cleanup", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_activation", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_conv", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_linear", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_normalization", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_quantization", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_state", false], [14, "module-coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation", false], [14, "module-coremltools.converters.mil.mil.passes.defs.preprocess", false], [14, "module-coremltools.converters.mil.mil.passes.defs.quantization", false], [14, "module-coremltools.converters.mil.mil.passes.defs.symbol_transform", false], [15, "module-coremltools.converters.sklearn._converter", false], [16, "module-coremltools.converters.xgboost._tree", false], [17, "module-coremltools.converters.mil.debugging_utils", false], [17, "module-coremltools.models.array_feature_extractor", false], [17, "module-coremltools.models.feature_vectorizer", false], [17, "module-coremltools.models.model", false], [17, "module-coremltools.models.nearest_neighbors.builder", false], [17, "module-coremltools.models.pipeline", false], [17, "module-coremltools.models.tree_ensemble", false], [17, 
"module-coremltools.models.utils", false], [18, "module-coremltools.models.ml_program.compression_utils", false], [19, "module-coremltools.models.neural_network.builder", false], [19, "module-coremltools.models.neural_network.flexible_shape_utils", false], [19, "module-coremltools.models.neural_network.quantization_utils", false], [19, "module-coremltools.models.neural_network.update_optimizer_utils", false], [21, "module-coremltools.optimize.coreml", false], [22, "module-coremltools.optimize.coreml", false], [23, "module-coremltools.optimize.coreml", false], [24, "module-coremltools.optimize.coreml", false], [25, "module-coremltools.optimize.coreml", false]], "moduledkmpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.ModuleDKMPalettizerConfig", false]], "modulegptqconfig (class in coremltools.optimize.torch.layerwise_compression.algorithms)": [[29, "coremltools.optimize.torch.layerwise_compression.algorithms.ModuleGPTQConfig", false]], "modulelinearquantizerconfig (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.ModuleLinearQuantizerConfig", false]], "modulemagnitudeprunerconfig (class in coremltools.optimize.torch.pruning)": [[28, "coremltools.optimize.torch.pruning.ModuleMagnitudePrunerConfig", false]], "moduleposttrainingpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.ModulePostTrainingPalettizerConfig", false]], "moduleposttrainingquantizerconfig (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.ModulePostTrainingQuantizerConfig", false]], "moduleskmpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.ModuleSKMPalettizerConfig", false]], "modulesparsegptconfig (class in coremltools.optimize.torch.layerwise_compression.algorithms)": [[28, "coremltools.optimize.torch.layerwise_compression.algorithms.ModuleSparseGPTConfig", false]], "mul (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.mul", false]], "multifunctiondescriptor (class in coremltools.models.utils)": [[17, "coremltools.models.utils.MultiFunctionDescriptor", false]], "neuralnetworkbuilder (class in coremltools.models.neural_network.builder)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder", false]], "neuralnetworkimagesize (class in coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkImageSize", false]], "neuralnetworkimagesizerange (class in coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkImageSizeRange", false]], "neuralnetworkmultiarrayshape (class in coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkMultiArrayShape", false]], "neuralnetworkmultiarrayshaperange (class in coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkMultiArrayShapeRange", false]], "non_maximum_suppression (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.non_maximum_suppression", false]], 
"non_maximum_suppression (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation.non_maximum_suppression", false]], "non_zero (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.non_zero", false]], "noop_elimination (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.noop_elimination", false]], "not_equal (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.not_equal", false]], "nullify_redundant_quantization_zero_point (class in coremltools.converters.mil.mil.passes.defs.optimize_quantization)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_quantization.nullify_redundant_quantization_zero_point", false]], "number_of_dimensions (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.number_of_dimensions", false]], "number_of_neighbors (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.number_of_neighbors", false]], "number_of_neighbors_allowed_range() (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.number_of_neighbors_allowed_range", false]], "number_of_neighbors_allowed_set() (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.number_of_neighbors_allowed_set", false]], "observertype (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.ObserverType", false]], "one_hot (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.one_hot", false]], "oplinearquantizerconfig (class in coremltools.optimize.coreml)": [[24, "coremltools.optimize.coreml.OpLinearQuantizerConfig", false]], "opmagnitudeprunerconfig (class in coremltools.optimize.coreml)": [[23, "coremltools.optimize.coreml.OpMagnitudePrunerConfig", false]], "oppalettizerconfig (class in coremltools.optimize.coreml)": [[21, "coremltools.optimize.coreml.OpPalettizerConfig", false]], "opthresholdprunerconfig (class in coremltools.optimize.coreml)": [[23, "coremltools.optimize.coreml.OpThresholdPrunerConfig", false]], "optimizationconfig (class in coremltools.optimize.coreml)": [[25, "coremltools.optimize.coreml.OptimizationConfig", false]], "outputmetric (class in coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.OutputMetric", false]], "pad (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.pad", false]], "palettize_weights() (in module coremltools.models.ml_program.compression_utils)": [[18, "coremltools.models.ml_program.compression_utils.palettize_weights", false]], "palettize_weights() (in module coremltools.optimize.coreml)": [[21, "coremltools.optimize.coreml.palettize_weights", false], 
[22, "coremltools.optimize.coreml.palettize_weights", false]], "pipeline (class in coremltools.models.pipeline)": [[17, "coremltools.models.pipeline.Pipeline", false]], "pipelineclassifier (class in coremltools.models.pipeline)": [[17, "coremltools.models.pipeline.PipelineClassifier", false]], "pipelineregressor (class in coremltools.models.pipeline)": [[17, "coremltools.models.pipeline.PipelineRegressor", false]], "pixel_shuffle (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.pixel_shuffle", false]], "pixel_unshuffle (class in coremltools.converters.mil.mil.ops.defs.ios16.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation.pixel_unshuffle", false]], "polynomialdecayscheduler (class in coremltools.optimize.torch.pruning.pruning_scheduler)": [[28, "coremltools.optimize.torch.pruning.pruning_scheduler.PolynomialDecayScheduler", false]], "posttrainingpalettizer (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.PostTrainingPalettizer", false]], "posttrainingpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.PostTrainingPalettizerConfig", false]], "posttrainingquantizer (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.PostTrainingQuantizer", false]], "posttrainingquantizerconfig (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.PostTrainingQuantizerConfig", false]], "pow (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.pow", false]], "predict() (coremltools.models.compiledmlmodel method)": [[17, "coremltools.models.CompiledMLModel.predict", false]], "predict() (coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.predict", false]], "prefer_state_in_downstream (class in coremltools.converters.mil.mil.passes.defs.optimize_state)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_state.prefer_state_in_downstream", false]], "prelu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.prelu", false]], "prelu (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.prelu", false]], "prelu_to_lrelu (class in coremltools.converters.mil.mil.passes.defs.optimize_activation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_activation.prelu_to_lrelu", false]], "prepare() (coremltools.optimize.torch.palettization.dkmpalettizer method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizer.prepare", false]], "prepare() (coremltools.optimize.torch.pruning.magnitudepruner method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePruner.prepare", false]], "prepare() (coremltools.optimize.torch.quantization.linearquantizer method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizer.prepare", false]], "program() (coremltools.converters.mil.mil.builder static method)": [[11, "coremltools.converters.mil.mil.Builder.program", false]], "prune_weights() (in module coremltools.optimize.coreml)": [[22, "coremltools.optimize.coreml.prune_weights", false], [23, "coremltools.optimize.coreml.prune_weights", false]], 
"pruningscheduler (class in coremltools.optimize.torch.pruning.pruning_scheduler)": [[28, "coremltools.optimize.torch.pruning.pruning_scheduler.PruningScheduler", false]], "quantizationscheme (class in coremltools.optimize.torch.quantization)": [[29, "coremltools.optimize.torch.quantization.QuantizationScheme", false]], "quantize (class in coremltools.converters.mil.mil.ops.defs.ios17.quantization_ops)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops.quantize", false]], "quantize_weights() (in module coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.quantize_weights", false]], "quantizedlayerselector (class in coremltools.models.neural_network.quantization_utils)": [[19, "coremltools.models.neural_network.quantization_utils.QuantizedLayerSelector", false]], "random_bernoulli (class in coremltools.converters.mil.mil.ops.defs.ios15.random)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.random.random_bernoulli", false]], "random_categorical (class in coremltools.converters.mil.mil.ops.defs.ios15.random)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.random.random_categorical", false]], "random_normal (class in coremltools.converters.mil.mil.ops.defs.ios15.random)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.random.random_normal", false]], "random_uniform (class in coremltools.converters.mil.mil.ops.defs.ios15.random)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.random.random_uniform", false]], "randomize_weights() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.randomize_weights", false]], "range_1d (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.range_1d", false]], "rangedim (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.RangeDim", false]], "rangeparam (class in coremltools.models.neural_network.update_optimizer_utils)": [[19, "coremltools.models.neural_network.update_optimizer_utils.RangeParam", false]], "rank0_expand_dims_swap (class in coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary.rank0_expand_dims_swap", false]], "read_state (class in coremltools.converters.mil.mil.ops.defs.ios18.states)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.states.read_state", false]], "real_div (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.real_div", false]], "reduce_argmax (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_argmax", false]], "reduce_argmax (class in coremltools.converters.mil.mil.ops.defs.ios17.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.reduction.reduce_argmax", false]], "reduce_argmin (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_argmin", false]], "reduce_argmin (class in coremltools.converters.mil.mil.ops.defs.ios17.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.reduction.reduce_argmin", false]], "reduce_l1_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_l1_norm", false]], 
"reduce_l2_norm (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_l2_norm", false]], "reduce_log_sum (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_log_sum", false]], "reduce_log_sum_exp (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_log_sum_exp", false]], "reduce_max (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_max", false]], "reduce_mean (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_mean", false]], "reduce_min (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_min", false]], "reduce_prod (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_prod", false]], "reduce_sum (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_sum", false]], "reduce_sum_square (class in coremltools.converters.mil.mil.ops.defs.ios15.reduction)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.reduction.reduce_sum_square", false]], "reduce_transposes (class in coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops.reduce_transposes", false]], "relu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.relu", false]], "relu6 (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.relu6", false]], "remove_function() (coremltools.models.utils.multifunctiondescriptor method)": [[17, "coremltools.models.utils.MultiFunctionDescriptor.remove_function", false]], "remove_redundant_ops (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.remove_redundant_ops", false]], "remove_symbolic_reshape (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.remove_symbolic_reshape", false]], "rename_feature() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.rename_feature", false]], "replace_stack_reshape (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.replace_stack_reshape", false]], "report() (coremltools.optimize.torch.palettization.dkmpalettizer method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizer.report", false]], "report() (coremltools.optimize.torch.pruning.magnitudepruner method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePruner.report", false]], "report() (coremltools.optimize.torch.quantization.linearquantizer method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizer.report", false]], "resample (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.resample", false]], "resample (class in 
coremltools.converters.mil.mil.ops.defs.ios16.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing.resample", false]], "resample (class in coremltools.converters.mil.mil.ops.defs.ios17.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing.resample", false]], "reshape (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.reshape", false]], "reshape (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.reshape", false]], "reshape_like (class in coremltools.converters.mil.mil.ops.defs.ios16.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation.reshape_like", false]], "reshape_like (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.reshape_like", false]], "resize (class in coremltools.converters.mil.mil.ops.defs.ios17.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing.resize", false]], "resize_bilinear (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.resize_bilinear", false]], "resize_nearest_neighbor (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.resize_nearest_neighbor", false]], "reverse (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.reverse", false]], "reverse (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.reverse", false]], "reverse_sequence (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.reverse_sequence", false]], "reverse_sequence (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.reverse_sequence", false]], "rnn (class in coremltools.converters.mil.mil.ops.defs.ios15.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.recurrent.rnn", false]], "rnn (class in coremltools.converters.mil.mil.ops.defs.ios17.recurrent)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.recurrent.rnn", false]], "round (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.round", false]], "rsqrt (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.rsqrt", false]], "rsqrt (class in coremltools.converters.mil.mil.ops.defs.ios17.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary.rsqrt", false]], "sanitize_input_output_names (class in coremltools.converters.mil.mil.passes.defs.preprocess)": [[14, "coremltools.converters.mil.mil.passes.defs.preprocess.sanitize_input_output_names", false]], "save() (coremltools.models.model.mlmodel method)": [[17, "coremltools.models.model.MLModel.save", false]], "save_multifunction() 
(in module coremltools.models.utils)": [[17, "coremltools.models.utils.save_multifunction", false]], "save_spec() (in module coremltools.models.utils)": [[17, "coremltools.models.utils.save_spec", false]], "scaled_dot_product_attention (class in coremltools.converters.mil.mil.ops.defs.ios18.transformers)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.transformers.scaled_dot_product_attention", false]], "scaled_tanh (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.scaled_tanh", false]], "scaled_tanh (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.scaled_tanh", false]], "scatter (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.scatter", false]], "scatter (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.scatter", false]], "scatter_along_axis (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.scatter_along_axis", false]], "scatter_along_axis (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.scatter_along_axis", false]], "scatter_nd (class in coremltools.converters.mil.mil.ops.defs.ios15.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather.scatter_nd", false]], "scatter_nd (class in coremltools.converters.mil.mil.ops.defs.ios17.scatter_gather)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather.scatter_nd", false]], "select (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.select", false]], "select_optimization (class in coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary.select_optimization", false]], "set_categorical_cross_entropy_loss() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_categorical_cross_entropy_loss", false]], "set_class_labels() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_class_labels", false]], "set_default_prediction_value() (coremltools.models.tree_ensemble.treeensemblebase method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase.set_default_prediction_value", false]], "set_global() (coremltools.optimize.coreml.optimizationconfig method)": [[25, "coremltools.optimize.coreml.OptimizationConfig.set_global", false]], "set_global() (coremltools.optimize.torch.palettization.dkmpalettizerconfig method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig.set_global", false]], "set_global() (coremltools.optimize.torch.pruning.magnitudeprunerconfig method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.set_global", false]], "set_global() (coremltools.optimize.torch.quantization.linearquantizerconfig method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.set_global", false]], "set_index_type() 
(coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.set_index_type", false]], "set_input() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_input", false]], "set_mean_squared_error_loss() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_mean_squared_error_loss", false]], "set_module_name() (coremltools.optimize.torch.palettization.dkmpalettizerconfig method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig.set_module_name", false]], "set_module_name() (coremltools.optimize.torch.pruning.magnitudeprunerconfig method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.set_module_name", false]], "set_module_name() (coremltools.optimize.torch.quantization.linearquantizerconfig method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.set_module_name", false]], "set_module_type() (coremltools.optimize.torch.palettization.dkmpalettizerconfig method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizerConfig.set_module_type", false]], "set_module_type() (coremltools.optimize.torch.pruning.magnitudeprunerconfig method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePrunerConfig.set_module_type", false]], "set_module_type() (coremltools.optimize.torch.quantization.linearquantizerconfig method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizerConfig.set_module_type", false]], "set_multiarray_ndshape_range() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.set_multiarray_ndshape_range", false]], "set_number_of_neighbors_with_bounds() (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder method)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.set_number_of_neighbors_with_bounds", false]], "set_op_name() (coremltools.optimize.coreml.optimizationconfig method)": [[25, "coremltools.optimize.coreml.OptimizationConfig.set_op_name", false]], "set_op_type() (coremltools.optimize.coreml.optimizationconfig method)": [[25, "coremltools.optimize.coreml.OptimizationConfig.set_op_type", false]], "set_optional_input() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_optional_input", false]], "set_output() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_output", false]], "set_post_evaluation_transform() (coremltools.models.tree_ensemble.treeensemblebase method)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase.set_post_evaluation_transform", false]], "set_pre_processing_parameters() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_pre_processing_parameters", false]], "set_training_input() (coremltools.models.neural_network.builder.neuralnetworkbuilder method)": [[19, "coremltools.models.neural_network.builder.NeuralNetworkBuilder.set_training_input", false]], "set_training_input() (coremltools.models.pipeline.pipeline method)": [[17, 
"coremltools.models.pipeline.Pipeline.set_training_input", false]], "set_training_input() (coremltools.models.pipeline.pipelineclassifier method)": [[17, "coremltools.models.pipeline.PipelineClassifier.set_training_input", false]], "set_training_input() (coremltools.models.pipeline.pipelineregressor method)": [[17, "coremltools.models.pipeline.PipelineRegressor.set_training_input", false]], "sgdparams (class in coremltools.models.neural_network.update_optimizer_utils)": [[19, "coremltools.models.neural_network.update_optimizer_utils.SgdParams", false]], "shape (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.Shape", false]], "shape (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.shape", false]], "sigmoid (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.sigmoid", false]], "sigmoid_hard (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.sigmoid_hard", false]], "sigmoid_hard (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.sigmoid_hard", false]], "sign (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.sign", false]], "silu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.silu", false]], "sin (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.sin", false]], "sinh (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.sinh", false]], "skmpalettizer (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.SKMPalettizer", false]], "skmpalettizerconfig (class in coremltools.optimize.torch.palettization)": [[27, "coremltools.optimize.torch.palettization.SKMPalettizerConfig", false]], "slice_by_index (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.slice_by_index", false]], "slice_by_index (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.slice_by_index", false]], "slice_by_size (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.slice_by_size", false]], "slice_by_size (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.slice_by_size", false]], "slice_update (class in coremltools.converters.mil.mil.ops.defs.ios18.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation.slice_update", false]], "sliding_windows (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.sliding_windows", false]], "sliding_windows (class in 
coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.sliding_windows", false]], "softmax (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.softmax", false]], "softplus (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.softplus", false]], "softplus_parametric (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.softplus_parametric", false]], "softplus_parametric (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.softplus_parametric", false]], "softsign (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.softsign", false]], "space_to_batch (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.space_to_batch", false]], "space_to_depth (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.space_to_depth", false]], "sparsegpt (class in coremltools.optimize.torch.layerwise_compression.algorithms)": [[28, "coremltools.optimize.torch.layerwise_compression.algorithms.SparseGPT", false]], "sparsify_weights() (in module coremltools.models.ml_program.compression_utils)": [[18, "coremltools.models.ml_program.compression_utils.sparsify_weights", false]], "split (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.split", false]], "sqrt (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.sqrt", false]], "square (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.square", false]], "squeeze (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.squeeze", false]], "squeeze (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.squeeze", false]], "stack (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.stack", false]], "statetype (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.StateType", false]], "step() (coremltools.optimize.torch.palettization.dkmpalettizer method)": [[27, "coremltools.optimize.torch.palettization.DKMPalettizer.step", false]], "step() (coremltools.optimize.torch.pruning.magnitudepruner method)": [[28, "coremltools.optimize.torch.pruning.MagnitudePruner.step", false]], "step() (coremltools.optimize.torch.quantization.linearquantizer method)": [[29, "coremltools.optimize.torch.quantization.LinearQuantizer.step", false]], "sub (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_binary)": [[13, 
"coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary.sub", false]], "tan (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.tan", false]], "tanh (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.tanh", false]], "tensortype (class in coremltools.converters.mil.input_types)": [[12, "coremltools.converters.mil.input_types.TensorType", false]], "threshold (class in coremltools.converters.mil.mil.ops.defs.ios15.elementwise_unary)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary.threshold", false]], "thresholded_relu (class in coremltools.converters.mil.mil.ops.defs.ios15.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.activation.thresholded_relu", false]], "thresholded_relu (class in coremltools.converters.mil.mil.ops.defs.ios17.activation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.activation.thresholded_relu", false]], "tile (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.tile", false]], "topk (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation.topk", false]], "topk (class in coremltools.converters.mil.mil.ops.defs.ios16.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation.topk", false]], "topk (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_operation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation.topk", false]], "topological_reorder (class in coremltools.converters.mil.mil.passes.defs.cleanup)": [[14, "coremltools.converters.mil.mil.passes.defs.cleanup.topological_reorder", false]], "transpose (class in coremltools.converters.mil.mil.ops.defs.ios15.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation.transpose", false]], "transpose (class in coremltools.converters.mil.mil.ops.defs.ios17.tensor_transformation)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation.transpose", false]], "treeensemblebase (class in coremltools.models.tree_ensemble)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleBase", false]], "treeensembleclassifier (class in coremltools.models.tree_ensemble)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleClassifier", false]], "treeensembleregressor (class in coremltools.models.tree_ensemble)": [[17, "coremltools.models.tree_ensemble.TreeEnsembleRegressor", false]], "update_image_size_range() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.update_image_size_range", false]], "update_multiarray_shape_range() (in module coremltools.models.neural_network.flexible_shape_utils)": [[19, "coremltools.models.neural_network.flexible_shape_utils.update_multiarray_shape_range", false]], "update_output_dtypes (class in coremltools.converters.mil.mil.passes.defs.preprocess)": [[14, "coremltools.converters.mil.mil.passes.defs.preprocess.update_output_dtypes", false]], "upsample_bilinear (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.upsample_bilinear", false]], "upsample_bilinear (class in 
coremltools.converters.mil.mil.ops.defs.ios16.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing.upsample_bilinear", false]], "upsample_nearest_neighbor (class in coremltools.converters.mil.mil.ops.defs.ios15.image_resizing)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing.upsample_nearest_neighbor", false]], "use_reflection_padding (class in coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation)": [[14, "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation.use_reflection_padding", false]], "weighting_scheme (coremltools.models.nearest_neighbors.builder.knearestneighborsclassifierbuilder property)": [[17, "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder.weighting_scheme", false]], "while_loop (class in coremltools.converters.mil.mil.ops.defs.ios15.control_flow)": [[13, "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow.while_loop", false]]}, "objects": {"coremltools.converters": [[9, 0, 0, "-", "_converters_entry"]], "coremltools.converters._converters_entry": [[9, 1, 1, "", "convert"]], "coremltools.converters.libsvm": [[10, 0, 0, "-", "_libsvm_converter"]], "coremltools.converters.libsvm._libsvm_converter": [[10, 1, 1, "", "convert"]], "coremltools.converters.mil": [[17, 0, 0, "-", "debugging_utils"], [12, 0, 0, "-", "input_types"], [11, 0, 0, "-", "mil"]], "coremltools.converters.mil.debugging_utils": [[17, 1, 1, "", "extract_submodel"]], "coremltools.converters.mil.input_types": [[12, 2, 1, "", "ClassifierConfig"], [12, 2, 1, "", "EnumeratedShapes"], [12, 2, 1, "", "ImageType"], [12, 2, 1, "", "InputType"], [12, 2, 1, "", "RangeDim"], [12, 2, 1, "", "Shape"], [12, 2, 1, "", "StateType"], [12, 2, 1, "", "TensorType"]], "coremltools.converters.mil.input_types.ClassifierConfig": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.EnumeratedShapes": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.ImageType": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.InputType": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.RangeDim": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.Shape": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.StateType": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.input_types.TensorType": [[12, 3, 1, "", "__init__"]], "coremltools.converters.mil.mil": [[11, 2, 1, "", "Builder"]], "coremltools.converters.mil.mil.Builder": [[11, 3, 1, "", "program"]], "coremltools.converters.mil.mil.ops.defs.coreml_dialect": [[13, 0, 0, "-", "ops"]], "coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops": [[13, 2, 1, "", "coreml_update_state"]], "coremltools.converters.mil.mil.ops.defs.iOS15": [[13, 0, 0, "-", "activation"], [13, 0, 0, "-", "classify"], [13, 0, 0, "-", "control_flow"], [13, 0, 0, "-", "conv"], [13, 0, 0, "-", "elementwise_binary"], [13, 0, 0, "-", "elementwise_unary"], [13, 0, 0, "-", "image_resizing"], [13, 0, 0, "-", "linear"], [13, 0, 0, "-", "normalization"], [13, 0, 0, "-", "pool"], [13, 0, 0, "-", "random"], [13, 0, 0, "-", "recurrent"], [13, 0, 0, "-", "reduction"], [13, 0, 0, "-", "scatter_gather"], [13, 0, 0, "-", "tensor_operation"], [13, 0, 0, "-", "tensor_transformation"]], "coremltools.converters.mil.mil.ops.defs.iOS15.activation": [[13, 2, 1, "", "clamped_relu"], [13, 2, 1, "", "elu"], [13, 2, 1, "", "gelu"], [13, 2, 1, "", "leaky_relu"], [13, 2, 1, "", "linear_activation"], [13, 2, 1, "", 
"prelu"], [13, 2, 1, "", "relu"], [13, 2, 1, "", "relu6"], [13, 2, 1, "", "scaled_tanh"], [13, 2, 1, "", "sigmoid"], [13, 2, 1, "", "sigmoid_hard"], [13, 2, 1, "", "silu"], [13, 2, 1, "", "softmax"], [13, 2, 1, "", "softplus"], [13, 2, 1, "", "softplus_parametric"], [13, 2, 1, "", "softsign"], [13, 2, 1, "", "thresholded_relu"]], "coremltools.converters.mil.mil.ops.defs.iOS15.classify": [[13, 2, 1, "", "classify"]], "coremltools.converters.mil.mil.ops.defs.iOS15.control_flow": [[13, 2, 1, "", "Const"], [13, 2, 1, "", "cond"], [13, 2, 1, "", "list_gather"], [13, 2, 1, "", "list_length"], [13, 2, 1, "", "list_read"], [13, 2, 1, "", "list_scatter"], [13, 2, 1, "", "list_write"], [13, 2, 1, "", "make_list"], [13, 2, 1, "", "select"], [13, 2, 1, "", "while_loop"]], "coremltools.converters.mil.mil.ops.defs.iOS15.conv": [[13, 2, 1, "", "conv"], [13, 2, 1, "", "conv_transpose"]], "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary": [[13, 2, 1, "", "add"], [13, 2, 1, "", "equal"], [13, 2, 1, "", "floor_div"], [13, 2, 1, "", "greater"], [13, 2, 1, "", "greater_equal"], [13, 2, 1, "", "less"], [13, 2, 1, "", "less_equal"], [13, 2, 1, "", "logical_and"], [13, 2, 1, "", "logical_or"], [13, 2, 1, "", "logical_xor"], [13, 2, 1, "", "maximum"], [13, 2, 1, "", "minimum"], [13, 2, 1, "", "mod"], [13, 2, 1, "", "mul"], [13, 2, 1, "", "not_equal"], [13, 2, 1, "", "pow"], [13, 2, 1, "", "real_div"], [13, 2, 1, "", "sub"]], "coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary": [[13, 2, 1, "", "abs"], [13, 2, 1, "", "acos"], [13, 2, 1, "", "asin"], [13, 2, 1, "", "atan"], [13, 2, 1, "", "atanh"], [13, 2, 1, "", "cast"], [13, 2, 1, "", "ceil"], [13, 2, 1, "", "clip"], [13, 2, 1, "", "cos"], [13, 2, 1, "", "cosh"], [13, 2, 1, "", "erf"], [13, 2, 1, "", "exp"], [13, 2, 1, "", "exp2"], [13, 2, 1, "", "floor"], [13, 2, 1, "", "inverse"], [13, 2, 1, "", "log"], [13, 2, 1, "", "logical_not"], [13, 2, 1, "", "round"], [13, 2, 1, "", "rsqrt"], [13, 2, 1, "", "sign"], [13, 2, 1, "", "sin"], [13, 2, 1, "", "sinh"], [13, 2, 1, "", "sqrt"], [13, 2, 1, "", "square"], [13, 2, 1, "", "tan"], [13, 2, 1, "", "tanh"], [13, 2, 1, "", "threshold"]], "coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing": [[13, 2, 1, "", "affine"], [13, 2, 1, "", "crop"], [13, 2, 1, "", "crop_resize"], [13, 2, 1, "", "resample"], [13, 2, 1, "", "resize_bilinear"], [13, 2, 1, "", "resize_nearest_neighbor"], [13, 2, 1, "", "upsample_bilinear"], [13, 2, 1, "", "upsample_nearest_neighbor"]], "coremltools.converters.mil.mil.ops.defs.iOS15.linear": [[13, 2, 1, "", "einsum"], [13, 2, 1, "", "linear"], [13, 2, 1, "", "matmul"]], "coremltools.converters.mil.mil.ops.defs.iOS15.normalization": [[13, 2, 1, "", "batch_norm"], [13, 2, 1, "", "instance_norm"], [13, 2, 1, "", "l2_norm"], [13, 2, 1, "", "layer_norm"], [13, 2, 1, "", "local_response_norm"]], "coremltools.converters.mil.mil.ops.defs.iOS15.pool": [[13, 2, 1, "", "avg_pool"], [13, 2, 1, "", "l2_pool"], [13, 2, 1, "", "max_pool"]], "coremltools.converters.mil.mil.ops.defs.iOS15.random": [[13, 2, 1, "", "random_bernoulli"], [13, 2, 1, "", "random_categorical"], [13, 2, 1, "", "random_normal"], [13, 2, 1, "", "random_uniform"]], "coremltools.converters.mil.mil.ops.defs.iOS15.recurrent": [[13, 2, 1, "", "gru"], [13, 2, 1, "", "lstm"], [13, 2, 1, "", "rnn"]], "coremltools.converters.mil.mil.ops.defs.iOS15.reduction": [[13, 2, 1, "", "reduce_argmax"], [13, 2, 1, "", "reduce_argmin"], [13, 2, 1, "", "reduce_l1_norm"], [13, 2, 1, "", "reduce_l2_norm"], [13, 2, 1, "", 
"reduce_log_sum"], [13, 2, 1, "", "reduce_log_sum_exp"], [13, 2, 1, "", "reduce_max"], [13, 2, 1, "", "reduce_mean"], [13, 2, 1, "", "reduce_min"], [13, 2, 1, "", "reduce_prod"], [13, 2, 1, "", "reduce_sum"], [13, 2, 1, "", "reduce_sum_square"]], "coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather": [[13, 2, 1, "", "gather"], [13, 2, 1, "", "gather_along_axis"], [13, 2, 1, "", "gather_nd"], [13, 2, 1, "", "scatter"], [13, 2, 1, "", "scatter_along_axis"], [13, 2, 1, "", "scatter_nd"]], "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation": [[13, 2, 1, "", "argsort"], [13, 2, 1, "", "band_part"], [13, 2, 1, "", "concat"], [13, 2, 1, "", "cumsum"], [13, 2, 1, "", "fill"], [13, 2, 1, "", "flatten2d"], [13, 2, 1, "", "identity"], [13, 2, 1, "", "non_maximum_suppression"], [13, 2, 1, "", "non_zero"], [13, 2, 1, "", "one_hot"], [13, 2, 1, "", "pad"], [13, 2, 1, "", "range_1d"], [13, 2, 1, "", "shape"], [13, 2, 1, "", "split"], [13, 2, 1, "", "stack"], [13, 2, 1, "", "tile"], [13, 2, 1, "", "topk"]], "coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation": [[13, 2, 1, "", "batch_to_space"], [13, 2, 1, "", "depth_to_space"], [13, 2, 1, "", "expand_dims"], [13, 2, 1, "", "pixel_shuffle"], [13, 2, 1, "", "reshape"], [13, 2, 1, "", "reverse"], [13, 2, 1, "", "reverse_sequence"], [13, 2, 1, "", "slice_by_index"], [13, 2, 1, "", "slice_by_size"], [13, 2, 1, "", "sliding_windows"], [13, 2, 1, "", "space_to_batch"], [13, 2, 1, "", "space_to_depth"], [13, 2, 1, "", "squeeze"], [13, 2, 1, "", "transpose"]], "coremltools.converters.mil.mil.ops.defs.iOS16": [[13, 0, 0, "-", "constexpr_ops"], [13, 0, 0, "-", "image_resizing"], [13, 0, 0, "-", "scatter_gather"], [13, 0, 0, "-", "tensor_operation"], [13, 0, 0, "-", "tensor_transformation"]], "coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops": [[13, 2, 1, "", "constexpr_affine_dequantize"], [13, 2, 1, "", "constexpr_cast"], [13, 2, 1, "", "constexpr_lut_to_dense"], [13, 2, 1, "", "constexpr_sparse_to_dense"]], "coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing": [[13, 2, 1, "", "crop_resize"], [13, 2, 1, "", "resample"], [13, 2, 1, "", "upsample_bilinear"]], "coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather": [[13, 2, 1, "", "gather"], [13, 2, 1, "", "gather_nd"]], "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation": [[13, 2, 1, "", "fill_like"], [13, 2, 1, "", "topk"]], "coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation": [[13, 2, 1, "", "pixel_unshuffle"], [13, 2, 1, "", "reshape_like"]], "coremltools.converters.mil.mil.ops.defs.iOS17": [[13, 0, 0, "-", "activation"], [13, 0, 0, "-", "conv"], [13, 0, 0, "-", "elementwise_unary"], [13, 0, 0, "-", "image_resizing"], [13, 0, 0, "-", "linear"], [13, 0, 0, "-", "normalization"], [13, 0, 0, "-", "quantization_ops"], [13, 0, 0, "-", "recurrent"], [13, 0, 0, "-", "reduction"], [13, 0, 0, "-", "scatter_gather"], [13, 0, 0, "-", "tensor_operation"], [13, 0, 0, "-", "tensor_transformation"]], "coremltools.converters.mil.mil.ops.defs.iOS17.activation": [[13, 2, 1, "", "clamped_relu"], [13, 2, 1, "", "elu"], [13, 2, 1, "", "leaky_relu"], [13, 2, 1, "", "linear_activation"], [13, 2, 1, "", "prelu"], [13, 2, 1, "", "scaled_tanh"], [13, 2, 1, "", "sigmoid_hard"], [13, 2, 1, "", "softplus_parametric"], [13, 2, 1, "", "thresholded_relu"]], "coremltools.converters.mil.mil.ops.defs.iOS17.conv": [[13, 2, 1, "", "conv"], [13, 2, 1, "", "conv_transpose"]], "coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary": [[13, 2, 1, 
"", "cast"], [13, 2, 1, "", "clip"], [13, 2, 1, "", "inverse"], [13, 2, 1, "", "log"], [13, 2, 1, "", "rsqrt"]], "coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing": [[13, 2, 1, "", "crop_resize"], [13, 2, 1, "", "resample"], [13, 2, 1, "", "resize"]], "coremltools.converters.mil.mil.ops.defs.iOS17.linear": [[13, 2, 1, "", "linear"], [13, 2, 1, "", "matmul"]], "coremltools.converters.mil.mil.ops.defs.iOS17.normalization": [[13, 2, 1, "", "batch_norm"], [13, 2, 1, "", "instance_norm"], [13, 2, 1, "", "l2_norm"], [13, 2, 1, "", "layer_norm"], [13, 2, 1, "", "local_response_norm"]], "coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops": [[13, 2, 1, "", "dequantize"], [13, 2, 1, "", "quantize"]], "coremltools.converters.mil.mil.ops.defs.iOS17.recurrent": [[13, 2, 1, "", "gru"], [13, 2, 1, "", "lstm"], [13, 2, 1, "", "rnn"]], "coremltools.converters.mil.mil.ops.defs.iOS17.reduction": [[13, 2, 1, "", "reduce_argmax"], [13, 2, 1, "", "reduce_argmin"]], "coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather": [[13, 2, 1, "", "gather"], [13, 2, 1, "", "gather_along_axis"], [13, 2, 1, "", "gather_nd"], [13, 2, 1, "", "scatter"], [13, 2, 1, "", "scatter_along_axis"], [13, 2, 1, "", "scatter_nd"]], "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation": [[13, 2, 1, "", "non_maximum_suppression"], [13, 2, 1, "", "topk"]], "coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation": [[13, 2, 1, "", "expand_dims"], [13, 2, 1, "", "reshape"], [13, 2, 1, "", "reshape_like"], [13, 2, 1, "", "reverse"], [13, 2, 1, "", "reverse_sequence"], [13, 2, 1, "", "slice_by_index"], [13, 2, 1, "", "slice_by_size"], [13, 2, 1, "", "sliding_windows"], [13, 2, 1, "", "squeeze"], [13, 2, 1, "", "transpose"]], "coremltools.converters.mil.mil.ops.defs.iOS18": [[13, 0, 0, "-", "compression"], [13, 0, 0, "-", "recurrent"], [13, 0, 0, "-", "states"], [13, 0, 0, "-", "tensor_transformation"], [13, 0, 0, "-", "transformers"]], "coremltools.converters.mil.mil.ops.defs.iOS18.compression": [[13, 2, 1, "", "constexpr_blockwise_shift_scale"], [13, 2, 1, "", "constexpr_cast"], [13, 2, 1, "", "constexpr_lut_to_dense"], [13, 2, 1, "", "constexpr_lut_to_sparse"], [13, 2, 1, "", "constexpr_sparse_blockwise_shift_scale"], [13, 2, 1, "", "constexpr_sparse_to_dense"]], "coremltools.converters.mil.mil.ops.defs.iOS18.recurrent": [[13, 2, 1, "", "gru"]], "coremltools.converters.mil.mil.ops.defs.iOS18.states": [[13, 2, 1, "", "read_state"]], "coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation": [[13, 2, 1, "", "slice_update"]], "coremltools.converters.mil.mil.ops.defs.iOS18.transformers": [[13, 2, 1, "", "scaled_dot_product_attention"]], "coremltools.converters.mil.mil.passes.defs": [[14, 0, 0, "-", "cleanup"], [14, 0, 0, "-", "optimize_activation"], [14, 0, 0, "-", "optimize_conv"], [14, 0, 0, "-", "optimize_elementwise_binary"], [14, 0, 0, "-", "optimize_linear"], [14, 0, 0, "-", "optimize_normalization"], [14, 0, 0, "-", "optimize_quantization"], [14, 0, 0, "-", "optimize_repeat_ops"], [14, 0, 0, "-", "optimize_state"], [14, 0, 0, "-", "optimize_tensor_operation"], [14, 0, 0, "-", "preprocess"], [14, 0, 0, "-", "quantization"], [14, 0, 0, "-", "symbol_transform"]], "coremltools.converters.mil.mil.passes.defs.cleanup": [[14, 2, 1, "", "const_deduplication"], [14, 2, 1, "", "const_elimination"], [14, 2, 1, "", "dead_code_elimination"], [14, 2, 1, "", "dedup_op_and_var_names"], [14, 2, 1, "", "expand_dynamic_linear"], [14, 2, 1, "", "fuse_reduce_mean"], [14, 2, 1, "", 
"loop_invariant_elimination"], [14, 2, 1, "", "noop_elimination"], [14, 2, 1, "", "remove_redundant_ops"], [14, 2, 1, "", "remove_symbolic_reshape"], [14, 2, 1, "", "topological_reorder"]], "coremltools.converters.mil.mil.passes.defs.optimize_activation": [[14, 2, 1, "", "fuse_gelu_exact"], [14, 2, 1, "", "fuse_gelu_tanh_approximation"], [14, 2, 1, "", "fuse_leaky_relu"], [14, 2, 1, "", "fuse_prelu"], [14, 2, 1, "", "prelu_to_lrelu"]], "coremltools.converters.mil.mil.passes.defs.optimize_conv": [[14, 2, 1, "", "add_conv_transpose_output_shape"], [14, 2, 1, "", "compose_conv1d"], [14, 2, 1, "", "fuse_conv_batchnorm"], [14, 2, 1, "", "fuse_conv_bias"], [14, 2, 1, "", "fuse_conv_scale"], [14, 2, 1, "", "fuse_pad_conv"]], "coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary": [[14, 2, 1, "", "divide_to_multiply"], [14, 2, 1, "", "fuse_elementwise_to_batchnorm"], [14, 2, 1, "", "rank0_expand_dims_swap"], [14, 2, 1, "", "select_optimization"]], "coremltools.converters.mil.mil.passes.defs.optimize_linear": [[14, 2, 1, "", "fuse_linear_bias"], [14, 2, 1, "", "fuse_matmul_weight_bias"], [14, 2, 1, "", "fuse_transpose_matmul"]], "coremltools.converters.mil.mil.passes.defs.optimize_normalization": [[14, 2, 1, "", "fuse_layernorm_or_instancenorm"]], "coremltools.converters.mil.mil.passes.defs.optimize_quantization": [[14, 2, 1, "", "dequantize_quantize_pair_elimination"], [14, 2, 1, "", "dequantize_to_constexpr"], [14, 2, 1, "", "distributive_quantized_binary_op_scale_normalization"], [14, 2, 1, "", "int_op_canonicalization"], [14, 2, 1, "", "merge_affine_dequantize_with_consecutive_ops"], [14, 2, 1, "", "nullify_redundant_quantization_zero_point"]], "coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops": [[14, 2, 1, "", "cast_optimization"], [14, 2, 1, "", "merge_consecutive_paddings"], [14, 2, 1, "", "merge_consecutive_relus"], [14, 2, 1, "", "merge_consecutive_reshapes"], [14, 2, 1, "", "merge_consecutive_transposes"], [14, 2, 1, "", "reduce_transposes"]], "coremltools.converters.mil.mil.passes.defs.optimize_state": [[14, 2, 1, "", "canonicalize_inplace_pattern"], [14, 2, 1, "", "prefer_state_in_downstream"]], "coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation": [[14, 2, 1, "", "concat_to_pixel_shuffle"], [14, 2, 1, "", "detect_concat_interleave"], [14, 2, 1, "", "expand_high_rank_reshape_and_transpose"], [14, 2, 1, "", "fuse_onehot_matmul_to_gather"], [14, 2, 1, "", "replace_stack_reshape"], [14, 2, 1, "", "use_reflection_padding"]], "coremltools.converters.mil.mil.passes.defs.preprocess": [[14, 2, 1, "", "image_input_preprocess"], [14, 2, 1, "", "sanitize_input_output_names"], [14, 2, 1, "", "update_output_dtypes"]], "coremltools.converters.mil.mil.passes.defs.quantization": [[14, 2, 1, "", "add_fp16_cast"]], "coremltools.converters.mil.mil.passes.defs.symbol_transform": [[14, 2, 1, "", "materialize_symbolic_shape_program"]], "coremltools.converters.sklearn": [[15, 0, 0, "-", "_converter"]], "coremltools.converters.sklearn._converter": [[15, 1, 1, "", "convert"]], "coremltools.converters.xgboost": [[16, 0, 0, "-", "_tree"]], "coremltools.converters.xgboost._tree": [[16, 1, 1, "", "convert"]], "coremltools.models": [[17, 2, 1, "", "CompiledMLModel"], [17, 0, 0, "-", "array_feature_extractor"], [17, 0, 0, "-", "feature_vectorizer"], [17, 0, 0, "-", "model"], [17, 0, 0, "-", "pipeline"], [17, 0, 0, "-", "tree_ensemble"], [17, 0, 0, "-", "utils"]], "coremltools.models.CompiledMLModel": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "make_state"], [17, 3, 
1, "", "predict"]], "coremltools.models.array_feature_extractor": [[17, 1, 1, "", "create_array_feature_extractor"]], "coremltools.models.feature_vectorizer": [[17, 1, 1, "", "create_feature_vectorizer"]], "coremltools.models.ml_program": [[18, 0, 0, "-", "compression_utils"]], "coremltools.models.ml_program.compression_utils": [[18, 1, 1, "", "affine_quantize_weights"], [18, 1, 1, "", "palettize_weights"], [18, 1, 1, "", "sparsify_weights"]], "coremltools.models.model": [[17, 2, 1, "", "MLModel"]], "coremltools.models.model.MLModel": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "get_compiled_model_path"], [17, 3, 1, "", "get_spec"], [17, 3, 1, "", "make_state"], [17, 3, 1, "", "predict"], [17, 3, 1, "", "save"]], "coremltools.models.nearest_neighbors": [[17, 0, 0, "-", "builder"]], "coremltools.models.nearest_neighbors.builder": [[17, 2, 1, "", "KNearestNeighborsClassifierBuilder"]], "coremltools.models.nearest_neighbors.builder.KNearestNeighborsClassifierBuilder": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_samples"], [17, 4, 1, "", "author"], [17, 4, 1, "", "description"], [17, 4, 1, "", "index_type"], [17, 4, 1, "", "is_updatable"], [17, 4, 1, "", "leaf_size"], [17, 4, 1, "", "license"], [17, 4, 1, "", "number_of_dimensions"], [17, 4, 1, "", "number_of_neighbors"], [17, 3, 1, "", "number_of_neighbors_allowed_range"], [17, 3, 1, "", "number_of_neighbors_allowed_set"], [17, 3, 1, "", "set_index_type"], [17, 3, 1, "", "set_number_of_neighbors_with_bounds"], [17, 4, 1, "", "weighting_scheme"]], "coremltools.models.neural_network": [[19, 0, 0, "-", "builder"], [19, 0, 0, "-", "flexible_shape_utils"], [19, 0, 0, "-", "quantization_utils"], [19, 0, 0, "-", "update_optimizer_utils"]], "coremltools.models.neural_network.builder": [[19, 2, 1, "", "NeuralNetworkBuilder"]], "coremltools.models.neural_network.builder.NeuralNetworkBuilder": [[19, 3, 1, "", "__init__"], [19, 3, 1, "", "add_acos"], [19, 3, 1, "", "add_acosh"], [19, 3, 1, "", "add_activation"], [19, 3, 1, "", "add_add_broadcastable"], [19, 3, 1, "", "add_argmax"], [19, 3, 1, "", "add_argmin"], [19, 3, 1, "", "add_argsort"], [19, 3, 1, "", "add_asin"], [19, 3, 1, "", "add_asinh"], [19, 3, 1, "", "add_atan"], [19, 3, 1, "", "add_atanh"], [19, 3, 1, "", "add_batched_mat_mul"], [19, 3, 1, "", "add_batchnorm"], [19, 3, 1, "", "add_bias"], [19, 3, 1, "", "add_bidirlstm"], [19, 3, 1, "", "add_branch"], [19, 3, 1, "", "add_broadcast_to_dynamic"], [19, 3, 1, "", "add_broadcast_to_like"], [19, 3, 1, "", "add_broadcast_to_static"], [19, 3, 1, "", "add_categorical_distribution"], [19, 3, 1, "", "add_ceil"], [19, 3, 1, "", "add_clamped_relu"], [19, 3, 1, "", "add_clip"], [19, 3, 1, "", "add_concat_nd"], [19, 3, 1, "", "add_constant_pad"], [19, 3, 1, "", "add_convolution"], [19, 3, 1, "", "add_convolution3d"], [19, 3, 1, "", "add_copy"], [19, 3, 1, "", "add_cos"], [19, 3, 1, "", "add_cosh"], [19, 3, 1, "", "add_crop"], [19, 3, 1, "", "add_crop_resize"], [19, 3, 1, "", "add_cumsum"], [19, 3, 1, "", "add_custom"], [19, 3, 1, "", "add_divide_broadcastable"], [19, 3, 1, "", "add_elementwise"], [19, 3, 1, "", "add_embedding"], [19, 3, 1, "", "add_embedding_nd"], [19, 3, 1, "", "add_equal"], [19, 3, 1, "", "add_erf"], [19, 3, 1, "", "add_exp2"], [19, 3, 1, "", "add_expand_dims"], [19, 3, 1, "", "add_fill_dynamic"], [19, 3, 1, "", "add_fill_like"], [19, 3, 1, "", "add_fill_static"], [19, 3, 1, "", "add_flatten"], [19, 3, 1, "", "add_flatten_to_2d"], [19, 3, 1, "", "add_floor"], [19, 3, 1, "", "add_floor_div_broadcastable"], [19, 3, 1, "", 
"add_gather"], [19, 3, 1, "", "add_gather_along_axis"], [19, 3, 1, "", "add_gather_nd"], [19, 3, 1, "", "add_gelu"], [19, 3, 1, "", "add_get_shape"], [19, 3, 1, "", "add_global_pooling3d"], [19, 3, 1, "", "add_greater_than"], [19, 3, 1, "", "add_gru"], [19, 3, 1, "", "add_inner_product"], [19, 3, 1, "", "add_l2_normalize"], [19, 3, 1, "", "add_layer_normalization"], [19, 3, 1, "", "add_less_than"], [19, 3, 1, "", "add_load_constant"], [19, 3, 1, "", "add_load_constant_nd"], [19, 3, 1, "", "add_logical"], [19, 3, 1, "", "add_loop"], [19, 3, 1, "", "add_loop_break"], [19, 3, 1, "", "add_loop_continue"], [19, 3, 1, "", "add_lower_triangular"], [19, 3, 1, "", "add_lrn"], [19, 3, 1, "", "add_matrix_band_part"], [19, 3, 1, "", "add_max_broadcastable"], [19, 3, 1, "", "add_min_broadcastable"], [19, 3, 1, "", "add_mod_broadcastable"], [19, 3, 1, "", "add_multiply_broadcastable"], [19, 3, 1, "", "add_mvn"], [19, 3, 1, "", "add_nms"], [19, 3, 1, "", "add_not_equal"], [19, 3, 1, "", "add_one_hot"], [19, 3, 1, "", "add_optionals"], [19, 3, 1, "", "add_padding"], [19, 3, 1, "", "add_permute"], [19, 3, 1, "", "add_pooling"], [19, 3, 1, "", "add_pooling3d"], [19, 3, 1, "", "add_pow_broadcastable"], [19, 3, 1, "", "add_random_bernoulli_dynamic"], [19, 3, 1, "", "add_random_bernoulli_like"], [19, 3, 1, "", "add_random_bernoulli_static"], [19, 3, 1, "", "add_random_normal_dynamic"], [19, 3, 1, "", "add_random_normal_like"], [19, 3, 1, "", "add_random_normal_static"], [19, 3, 1, "", "add_random_uniform_dynamic"], [19, 3, 1, "", "add_random_uniform_like"], [19, 3, 1, "", "add_random_uniform_static"], [19, 3, 1, "", "add_range_dynamic"], [19, 3, 1, "", "add_range_static"], [19, 3, 1, "", "add_rank_preserving_reshape"], [19, 3, 1, "", "add_reduce"], [19, 3, 1, "", "add_reduce_l1"], [19, 3, 1, "", "add_reduce_l2"], [19, 3, 1, "", "add_reduce_logsum"], [19, 3, 1, "", "add_reduce_logsumexp"], [19, 3, 1, "", "add_reduce_max"], [19, 3, 1, "", "add_reduce_mean"], [19, 3, 1, "", "add_reduce_min"], [19, 3, 1, "", "add_reduce_prod"], [19, 3, 1, "", "add_reduce_sum"], [19, 3, 1, "", "add_reduce_sumsquare"], [19, 3, 1, "", "add_reorganize_data"], [19, 3, 1, "", "add_reshape"], [19, 3, 1, "", "add_reshape_dynamic"], [19, 3, 1, "", "add_reshape_like"], [19, 3, 1, "", "add_reshape_static"], [19, 3, 1, "", "add_resize_bilinear"], [19, 3, 1, "", "add_reverse"], [19, 3, 1, "", "add_reverse_sequence"], [19, 3, 1, "", "add_round"], [19, 3, 1, "", "add_scale"], [19, 3, 1, "", "add_scatter"], [19, 3, 1, "", "add_scatter_along_axis"], [19, 3, 1, "", "add_scatter_nd"], [19, 3, 1, "", "add_sequence_repeat"], [19, 3, 1, "", "add_sign"], [19, 3, 1, "", "add_simple_rnn"], [19, 3, 1, "", "add_sin"], [19, 3, 1, "", "add_sinh"], [19, 3, 1, "", "add_slice"], [19, 3, 1, "", "add_slice_by_size"], [19, 3, 1, "", "add_slice_dynamic"], [19, 3, 1, "", "add_slice_static"], [19, 3, 1, "", "add_sliding_windows"], [19, 3, 1, "", "add_softmax"], [19, 3, 1, "", "add_softmax_nd"], [19, 3, 1, "", "add_split"], [19, 3, 1, "", "add_split_nd"], [19, 3, 1, "", "add_squeeze"], [19, 3, 1, "", "add_stack"], [19, 3, 1, "", "add_subtract_broadcastable"], [19, 3, 1, "", "add_tan"], [19, 3, 1, "", "add_tanh"], [19, 3, 1, "", "add_tile"], [19, 3, 1, "", "add_topk"], [19, 3, 1, "", "add_transpose"], [19, 3, 1, "", "add_unary"], [19, 3, 1, "", "add_unilstm"], [19, 3, 1, "", "add_upper_triangular"], [19, 3, 1, "", "add_upsample"], [19, 3, 1, "", "add_where_broadcastable"], [19, 3, 1, "", "add_where_nonzero"], [19, 3, 1, "", "inspect_conv_channels"], [19, 3, 1, "", 
"inspect_innerproduct_channels"], [19, 3, 1, "", "inspect_input_features"], [19, 3, 1, "", "inspect_layers"], [19, 3, 1, "", "inspect_loss_layers"], [19, 3, 1, "", "inspect_optimizer"], [19, 3, 1, "", "inspect_output_features"], [19, 3, 1, "", "inspect_updatable_layers"], [19, 3, 1, "", "make_updatable"], [19, 3, 1, "", "set_categorical_cross_entropy_loss"], [19, 3, 1, "", "set_class_labels"], [19, 3, 1, "", "set_input"], [19, 3, 1, "", "set_mean_squared_error_loss"], [19, 3, 1, "", "set_optional_input"], [19, 3, 1, "", "set_output"], [19, 3, 1, "", "set_pre_processing_parameters"], [19, 3, 1, "", "set_training_input"]], "coremltools.models.neural_network.flexible_shape_utils": [[19, 2, 1, "", "NeuralNetworkImageSize"], [19, 2, 1, "", "NeuralNetworkImageSizeRange"], [19, 2, 1, "", "NeuralNetworkMultiArrayShape"], [19, 2, 1, "", "NeuralNetworkMultiArrayShapeRange"], [19, 1, 1, "", "add_enumerated_image_sizes"], [19, 1, 1, "", "add_enumerated_multiarray_shapes"], [19, 1, 1, "", "add_multiarray_ndshape_enumeration"], [19, 1, 1, "", "set_multiarray_ndshape_range"], [19, 1, 1, "", "update_image_size_range"], [19, 1, 1, "", "update_multiarray_shape_range"]], "coremltools.models.neural_network.flexible_shape_utils.NeuralNetworkMultiArrayShapeRange": [[19, 3, 1, "", "isFlexible"]], "coremltools.models.neural_network.quantization_utils": [[19, 2, 1, "", "AdvancedQuantizedLayerSelector"], [19, 2, 1, "", "MatrixMultiplyLayerSelector"], [19, 2, 1, "", "ModelMetrics"], [19, 2, 1, "", "OutputMetric"], [19, 2, 1, "", "QuantizedLayerSelector"], [19, 1, 1, "", "activate_int8_int8_matrix_multiplications"], [19, 1, 1, "", "compare_models"], [19, 1, 1, "", "quantize_weights"]], "coremltools.models.neural_network.quantization_utils.AdvancedQuantizedLayerSelector": [[19, 3, 1, "", "do_quantize"]], "coremltools.models.neural_network.quantization_utils.MatrixMultiplyLayerSelector": [[19, 3, 1, "", "do_quantize"]], "coremltools.models.neural_network.update_optimizer_utils": [[19, 2, 1, "", "AdamParams"], [19, 2, 1, "", "Batch"], [19, 2, 1, "", "RangeParam"], [19, 2, 1, "", "SgdParams"]], "coremltools.models.pipeline": [[17, 2, 1, "", "Pipeline"], [17, 2, 1, "", "PipelineClassifier"], [17, 2, 1, "", "PipelineRegressor"]], "coremltools.models.pipeline.Pipeline": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_model"], [17, 3, 1, "", "set_training_input"]], "coremltools.models.pipeline.PipelineClassifier": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_model"], [17, 3, 1, "", "set_training_input"]], "coremltools.models.pipeline.PipelineRegressor": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_model"], [17, 3, 1, "", "set_training_input"]], "coremltools.models.tree_ensemble": [[17, 2, 1, "", "TreeEnsembleBase"], [17, 2, 1, "", "TreeEnsembleClassifier"], [17, 2, 1, "", "TreeEnsembleRegressor"]], "coremltools.models.tree_ensemble.TreeEnsembleBase": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_branch_node"], [17, 3, 1, "", "add_leaf_node"], [17, 3, 1, "", "set_default_prediction_value"], [17, 3, 1, "", "set_post_evaluation_transform"]], "coremltools.models.tree_ensemble.TreeEnsembleClassifier": [[17, 3, 1, "", "__init__"]], "coremltools.models.tree_ensemble.TreeEnsembleRegressor": [[17, 3, 1, "", "__init__"]], "coremltools.models.utils": [[17, 2, 1, "", "MultiFunctionDescriptor"], [17, 1, 1, "", "bisect_model"], [17, 1, 1, "", "compile_model"], [17, 1, 1, "", "convert_double_to_float_multiarray_type"], [17, 1, 1, "", "evaluate_classifier"], [17, 1, 1, "", "evaluate_classifier_with_probabilities"], [17, 1, 1, 
"", "evaluate_regressor"], [17, 1, 1, "", "evaluate_transformer"], [17, 1, 1, "", "load_spec"], [17, 1, 1, "", "make_pipeline"], [17, 1, 1, "", "materialize_dynamic_shape_mlmodel"], [17, 1, 1, "", "randomize_weights"], [17, 1, 1, "", "rename_feature"], [17, 1, 1, "", "save_multifunction"], [17, 1, 1, "", "save_spec"]], "coremltools.models.utils.MultiFunctionDescriptor": [[17, 3, 1, "", "__init__"], [17, 3, 1, "", "add_function"], [17, 3, 1, "", "add_model"], [17, 3, 1, "", "remove_function"]], "coremltools.optimize": [[25, 0, 0, "-", "coreml"]], "coremltools.optimize.coreml": [[25, 2, 1, "", "CoreMLOpMetaData"], [25, 2, 1, "", "CoreMLWeightMetaData"], [24, 2, 1, "", "OpLinearQuantizerConfig"], [23, 2, 1, "", "OpMagnitudePrunerConfig"], [21, 2, 1, "", "OpPalettizerConfig"], [23, 2, 1, "", "OpThresholdPrunerConfig"], [25, 2, 1, "", "OptimizationConfig"], [25, 1, 1, "", "decompress_weights"], [25, 1, 1, "", "get_weights_metadata"], [24, 1, 1, "", "linear_quantize_weights"], [22, 1, 1, "", "palettize_weights"], [23, 1, 1, "", "prune_weights"]], "coremltools.optimize.coreml.OptimizationConfig": [[25, 3, 1, "", "from_dict"], [25, 3, 1, "", "from_yaml"], [25, 3, 1, "", "set_global"], [25, 3, 1, "", "set_op_name"], [25, 3, 1, "", "set_op_type"]], "coremltools.optimize.coreml.experimental": [[24, 1, 1, "", "linear_quantize_activations"]], "coremltools.optimize.torch.layerwise_compression": [[29, 2, 1, "", "LayerwiseCompressor"], [29, 2, 1, "", "LayerwiseCompressorConfig"]], "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressor": [[29, 3, 1, "", "compress"]], "coremltools.optimize.torch.layerwise_compression.LayerwiseCompressorConfig": [[29, 3, 1, "", "as_dict"], [29, 3, 1, "", "from_dict"], [29, 3, 1, "", "from_yaml"]], "coremltools.optimize.torch.layerwise_compression.algorithms": [[29, 2, 1, "", "GPTQ"], [29, 2, 1, "", "ModuleGPTQConfig"], [28, 2, 1, "", "ModuleSparseGPTConfig"], [28, 2, 1, "", "SparseGPT"]], "coremltools.optimize.torch.palettization": [[27, 2, 1, "", "DKMPalettizer"], [27, 2, 1, "", "DKMPalettizerConfig"], [27, 2, 1, "", "ModuleDKMPalettizerConfig"], [27, 2, 1, "", "ModulePostTrainingPalettizerConfig"], [27, 2, 1, "", "ModuleSKMPalettizerConfig"], [27, 2, 1, "", "PostTrainingPalettizer"], [27, 2, 1, "", "PostTrainingPalettizerConfig"], [27, 2, 1, "", "SKMPalettizer"], [27, 2, 1, "", "SKMPalettizerConfig"]], "coremltools.optimize.torch.palettization.DKMPalettizer": [[27, 3, 1, "", "finalize"], [27, 3, 1, "", "prepare"], [27, 3, 1, "", "report"], [27, 3, 1, "", "step"]], "coremltools.optimize.torch.palettization.DKMPalettizerConfig": [[27, 3, 1, "", "as_dict"], [27, 3, 1, "", "from_dict"], [27, 3, 1, "", "from_yaml"], [27, 3, 1, "", "set_global"], [27, 3, 1, "", "set_module_name"], [27, 3, 1, "", "set_module_type"]], "coremltools.optimize.torch.palettization.ModuleDKMPalettizerConfig": [[27, 3, 1, "", "as_dict"], [27, 3, 1, "", "from_dict"], [27, 3, 1, "", "from_yaml"]], "coremltools.optimize.torch.pruning": [[28, 2, 1, "", "MagnitudePruner"], [28, 2, 1, "", "MagnitudePrunerConfig"], [28, 2, 1, "", "ModuleMagnitudePrunerConfig"]], "coremltools.optimize.torch.pruning.MagnitudePruner": [[28, 3, 1, "", "finalize"], [28, 3, 1, "", "prepare"], [28, 3, 1, "", "report"], [28, 3, 1, "", "step"]], "coremltools.optimize.torch.pruning.MagnitudePrunerConfig": [[28, 3, 1, "", "as_dict"], [28, 3, 1, "", "from_dict"], [28, 3, 1, "", "from_yaml"], [28, 3, 1, "", "set_global"], [28, 3, 1, "", "set_module_name"], [28, 3, 1, "", "set_module_type"]], 
"coremltools.optimize.torch.pruning.ModuleMagnitudePrunerConfig": [[28, 3, 1, "", "as_dict"], [28, 3, 1, "", "from_dict"], [28, 3, 1, "", "from_yaml"]], "coremltools.optimize.torch.pruning.pruning_scheduler": [[28, 2, 1, "", "ConstantSparsityScheduler"], [28, 2, 1, "", "PolynomialDecayScheduler"], [28, 2, 1, "", "PruningScheduler"]], "coremltools.optimize.torch.pruning.pruning_scheduler.ConstantSparsityScheduler": [[28, 3, 1, "", "compute_sparsity"]], "coremltools.optimize.torch.pruning.pruning_scheduler.PolynomialDecayScheduler": [[28, 3, 1, "", "compute_sparsity"]], "coremltools.optimize.torch.quantization": [[29, 2, 1, "", "LinearQuantizer"], [29, 2, 1, "", "LinearQuantizerConfig"], [29, 2, 1, "", "ModuleLinearQuantizerConfig"], [29, 2, 1, "", "ModulePostTrainingQuantizerConfig"], [29, 2, 1, "", "ObserverType"], [29, 2, 1, "", "PostTrainingQuantizer"], [29, 2, 1, "", "PostTrainingQuantizerConfig"], [29, 2, 1, "", "QuantizationScheme"]], "coremltools.optimize.torch.quantization.LinearQuantizer": [[29, 3, 1, "", "finalize"], [29, 3, 1, "", "prepare"], [29, 3, 1, "", "report"], [29, 3, 1, "", "step"]], "coremltools.optimize.torch.quantization.LinearQuantizerConfig": [[29, 3, 1, "", "as_dict"], [29, 3, 1, "", "from_dict"], [29, 3, 1, "", "from_yaml"], [29, 3, 1, "", "set_global"], [29, 3, 1, "", "set_module_name"], [29, 3, 1, "", "set_module_type"]], "coremltools.optimize.torch.quantization.ModuleLinearQuantizerConfig": [[29, 3, 1, "", "as_dict"], [29, 3, 1, "", "from_dict"], [29, 3, 1, "", "from_yaml"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "function", "Python function"], "2": ["py", "class", "Python class"], "3": ["py", "method", "Python method"], "4": ["py", "property", "Python property"]}, "objtypes": {"0": "py:module", "1": "py:function", "2": "py:class", "3": "py:method", "4": "py:property"}, "terms": {"": [9, 12, 13, 14, 15, 17, 19, 21, 22, 24, 25, 27, 28, 29], "0": [0, 2, 3, 4, 6, 9, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 27, 28, 29], "00": [4, 6, 13, 23, 28], "000": [4, 6], "00000001b": [22, 23], "0001": 27, "001": 0, "008": 0, "00b": [21, 22], "01": [0, 13, 19, 23, 25, 28, 29], "01b": [21, 22], "02": [21, 22], "03": 23, "0447": 14, "044715": 14, "044715x": 13, "05": [19, 23, 28], "06": 19, "07": [2, 3, 27], "08": [13, 19, 21, 22, 23, 28], "0b1": 19, "0f": [0, 2, 3], "0th": 2, "1": [0, 2, 3, 4, 7, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 27, 28, 29], "10": [0, 2, 3, 13, 14, 17, 19, 21, 22, 23, 24, 25, 29], "100": [0, 2, 3, 14, 27, 29], "1000": [17, 19, 27], "100100": [22, 23], "1024": [0, 14, 21, 22, 23, 24, 25, 27], "10b": [21, 22], "11": [9, 13, 17, 19, 21, 22], "11b": [21, 22], "12": [2, 3, 13, 17, 18, 22, 23, 25], "1240": 17, "127": [14, 22, 24, 27], "128": [0, 2, 3, 14, 19, 22, 24, 25, 27, 28, 29], "13": [9, 13, 17], "1307": [0, 2, 3], "14": 19, "15": [9, 17, 19], "16": [0, 9, 19, 22, 24, 25, 27, 28, 29], "1623": 28, "1651": 9, "17": 2, "19": [21, 22], "1d": [13, 14], "1e": [2, 3, 13, 18, 19, 22, 23, 25, 27], "1e6": 9, "1f": [0, 2, 3], "1st": [2, 13, 14, 19], "2": [0, 2, 3, 9, 11, 12, 13, 14, 15, 17, 19, 21, 22, 23, 25, 27, 28, 29], "20": [21, 22, 23, 24, 25, 27, 28, 29], "200": [21, 22, 23, 24, 25, 29], "2016": 14, "2048": [21, 22, 23, 24, 25, 27], "214": 14, "224": 13, "2352": [2, 3], "2361": 28, "2400": 17, "2426": 28, "2450": 28, "25": [13, 27], "2501220703125": [22, 25], "254": [22, 24], "255": [14, 22, 24], "256": [9, 13, 19, 21], "25600": [22, 25], "26": 13, "28": [0, 2, 3], "299": 19, "2_const": 14, "2d": [13, 
14, 24], "2nd": [2, 14, 19], "2r": [22, 24], "3": [2, 3, 6, 7, 9, 11, 12, 13, 14, 15, 17, 19, 21, 22, 23, 25, 27, 28, 29], "30": 17, "300": [27, 29], "3081": [0, 2, 3], "31": 13, "3136": 0, "32": [0, 9, 12, 13, 16, 21, 22, 24, 25, 28], "33": 13, "3852": 28, "39": 14, "3rd": [14, 19, 23], "4": [0, 2, 3, 7, 12, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "400": 29, "4096": 19, "4097": [22, 25], "414": 14, "42": 13, "45": 13, "48": 12, "4f": [0, 2, 3], "4th": 19, "4x": [22, 24], "5": [0, 9, 13, 14, 17, 19, 21, 22, 23, 25, 28, 29], "50": [13, 19, 22, 25], "50000": 19, "54": 23, "56": [22, 23], "58": 13, "59165842": 14, "5th": 19, "5x": 13, "6": [13, 14, 19, 21, 22, 23, 25, 28], "6056": 28, "61": [23, 28], "62": [23, 28], "64": [0, 12, 13, 14, 19, 22, 25, 27], "65536": 27, "67": 13, "6f": [0, 2, 3], "7": [3, 13, 14, 21, 23, 27, 28], "70": 3, "702x": 13, "75": [13, 23, 28], "8": [3, 13, 14, 19, 21, 22, 23, 24, 25, 27, 28, 29], "80": 3, "8x": 13, "9": [13, 14, 19, 23, 28], "900": 3, "92": 13, "96": 14, "999": 19, "9_": 14, "A": [9, 12, 13, 14, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "AND": [13, 19], "And": [13, 23, 28], "As": [3, 13, 14, 17], "At": [2, 12, 13, 17, 21, 22, 28, 29], "By": [9, 29], "For": [0, 3, 5, 9, 11, 12, 13, 14, 15, 17, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29], "If": [9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "In": [0, 2, 3, 9, 13, 14, 15, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "It": [0, 3, 9, 12, 13, 14, 17, 19, 21, 24, 25, 27, 28, 29], "Its": 13, "NOT": [13, 19], "No": [9, 13], "Not": 19, "OR": [13, 19], "On": 14, "One": [9, 13, 15, 17, 19, 27, 28], "That": [13, 14, 19, 21, 22, 23, 24, 25, 28, 29], "The": [0, 2, 3, 9, 11, 12, 13, 14, 15, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "Then": [23, 28], "There": 13, "These": [3, 14, 16, 27, 29], "To": [0, 2, 3, 9, 13, 14, 17, 19, 20, 28, 29], "With": [13, 14, 17, 22, 24], "_": 14, "_8": [22, 24], "__init__": [9, 12, 17, 19], "_activ": 13, "_bit": [22, 24, 27], "_bodi": 13, "_chunk1": 17, "_chunk2": 17, "_chunked_pipelin": 17, "_cond": 13, "_config": 24, "_convert": 15, "_converters_entri": 9, "_exampl": 4, "_examples_jupyt": 1, "_examples_python": 1, "_existing_block": 13, "_false_fn": 13, "_float": [22, 24], "_get_linear_lookup_table_and_weight": 19, "_hypotheticalvalu": 14, "_index": 28, "_integ": [22, 24], "_lazytransposehypotheticalvalu": 14, "_libsvm_convert": 10, "_linearquantizerconfig": 29, "_mil_program": 14, "_mlmodelproxi": 17, "_number": 28, "_of": 28, "_op": 11, "_quantization_mode_custom_lookup_t": 19, "_report": [27, 28, 29], "_sparsiti": 28, "_step": 28, "_to": [22, 24], "_torch": 29, "_transformconcat": 14, "_transformpad": 14, "_transposeoptim": 14, "_tree": 16, "_true_fn": 13, "_unary_like_op_typ": 14, "_updat": 28, "a0": 13, "a1": 13, "a2": 13, "a3": 13, "ab": [13, 19, 22, 24, 27], "abc": 28, "abl": 29, "about": [0, 2, 3, 9, 13, 14, 17, 21, 22, 23, 24], "abov": [9, 13, 14, 17, 19, 21, 22, 23, 24, 25], "absolut": [13, 19, 22, 23, 24, 25, 28], "abstract": 28, "acceler": 19, "accept": [12, 17, 27], "access": [22, 25], "accommod": 19, "accompani": [22, 23], "accord": [13, 15, 19, 21, 22, 29], "accordingli": [13, 14], "accumul": [13, 19], "accur": [27, 28, 29], "accuraci": [0, 2, 3], "accuracy_palett": 0, "accuracy_prun": 3, "accuracy_quant": 2, "accuracy_unpalett": 0, "accuracy_unprun": 3, "accuracy_unquant": 2, "achiev": [3, 13, 14, 22, 24, 28], "aco": [13, 19], "acosh": 19, "acoshlayerparam": 19, "acoslayerparam": 19, "across": [13, 14, 17, 19, 25, 27, 28, 29], "across_channel": 19, 
"act": [14, 27, 28], "activ": [2, 9, 14, 17, 19, 24, 27, 28, 29], "activate_int8_int8_matrix_multipl": 19, "activation_config": 24, "activation_dtyp": 29, "activation_observ": 29, "actual": [0, 13, 28], "ad": [13, 14, 17, 19, 28, 29], "adam": [2, 3, 19], "adamparam": 19, "add": [11, 13, 14, 17, 19, 27], "add_": 9, "add_0": 14, "add_aco": 19, "add_acosh": 19, "add_activ": 19, "add_add_broadcast": 19, "add_argmax": 19, "add_argmin": 19, "add_argsort": 19, "add_asin": 19, "add_asinh": 19, "add_atan": 19, "add_atanh": 19, "add_batched_mat_mul": 19, "add_batchnorm": 19, "add_bia": 19, "add_bidirlstm": 19, "add_branch": 19, "add_branch_nod": 17, "add_broadcast": 19, "add_broadcast_to_dynam": 19, "add_broadcast_to_lik": 19, "add_broadcast_to_stat": 19, "add_categorical_distribut": 19, "add_ceil": 19, "add_channel_rang": 19, "add_clamped_relu": 19, "add_clip": 19, "add_co": 19, "add_concat_nd": 19, "add_constant_pad": 19, "add_conv_transpose_output_shap": 14, "add_convolut": 19, "add_convolution3d": 19, "add_copi": 19, "add_cosh": 19, "add_crop": 19, "add_crop_res": 19, "add_cumsum": 19, "add_custom": 19, "add_divide_broadcast": 19, "add_elementwis": 19, "add_embed": 19, "add_embedding_nd": 19, "add_enumerated_image_s": 19, "add_enumerated_multiarray_shap": 19, "add_equ": 19, "add_erf": 19, "add_exp2": 19, "add_expand_dim": 19, "add_extra_centroid": 27, "add_fill_dynam": 19, "add_fill_lik": 19, "add_fill_stat": 19, "add_flatten": 19, "add_flatten_to_2d": 19, "add_floor": 19, "add_floor_div_broadcast": 19, "add_fp16_cast": 14, "add_funct": [11, 17], "add_gath": 19, "add_gather_along_axi": 19, "add_gather_nd": 19, "add_gelu": 19, "add_get_shap": 19, "add_global_pooling3d": 19, "add_greater_than": 19, "add_gru": 19, "add_height_rang": 19, "add_inner_product": 19, "add_l2_norm": 19, "add_layer_norm": 19, "add_leaf_nod": 17, "add_less_than": 19, "add_load_const": 19, "add_load_constant_nd": 19, "add_log": 19, "add_loop": 19, "add_loop_break": 19, "add_loop_continu": 19, "add_lower_triangular": 19, "add_lrn": 19, "add_matrix_band_part": 19, "add_max_broadcast": 19, "add_min_broadcast": 19, "add_mod_broadcast": 19, "add_model": 17, "add_multiarray_ndshape_enumer": 19, "add_multiply_broadcast": 19, "add_mvn": 19, "add_nm": 19, "add_not_equ": 19, "add_one_hot": 19, "add_opt": 19, "add_pad": 19, "add_permut": 19, "add_pool": 19, "add_pooling3d": 19, "add_pow_broadcast": 19, "add_random_bernoulli_dynam": 19, "add_random_bernoulli_lik": 19, "add_random_bernoulli_stat": 19, "add_random_normal_dynam": 19, "add_random_normal_lik": 19, "add_random_normal_stat": 19, "add_random_uniform_dynam": 19, "add_random_uniform_lik": 19, "add_random_uniform_stat": 19, "add_range_dynam": 19, "add_range_stat": 19, "add_rank_preserving_reshap": 19, "add_reduc": 19, "add_reduce_l1": 19, "add_reduce_l2": 19, "add_reduce_logsum": 19, "add_reduce_logsumexp": 19, "add_reduce_max": 19, "add_reduce_mean": 19, "add_reduce_min": 19, "add_reduce_prod": 19, "add_reduce_sum": 19, "add_reduce_sumsquar": 19, "add_reorganize_data": 19, "add_reshap": 19, "add_reshape_dynam": 19, "add_reshape_lik": 19, "add_reshape_stat": 19, "add_resize_bilinear": 19, "add_revers": 19, "add_reverse_sequ": 19, "add_round": 19, "add_sampl": 17, "add_scal": 19, "add_scatt": 19, "add_scatter_along_axi": 19, "add_scatter_nd": 19, "add_sequence_repeat": 19, "add_sign": 19, "add_simple_rnn": 19, "add_sin": 19, "add_sinh": 19, "add_slic": 19, "add_slice_by_s": 19, "add_slice_dynam": 19, "add_slice_stat": 19, "add_sliding_window": 19, "add_softmax": 19, 
"add_softmax_nd": 19, "add_split": 19, "add_split_nd": 19, "add_squeez": 19, "add_stack": 19, "add_subtract_broadcast": 19, "add_tan": 19, "add_tanh": 19, "add_til": 19, "add_topk": 19, "add_transpos": 19, "add_unari": 19, "add_unilstm": 19, "add_upper_triangular": 19, "add_upsampl": 19, "add_where_broadcast": 19, "add_where_nonzero": 19, "add_width_rang": 19, "addbroadcastablelayerparam": 19, "addit": [13, 15, 17, 19, 25, 28], "addition": [13, 29], "adjust": [19, 29], "advanc": [27, 29], "advancedquantizedlayerselector": 19, "affect": 14, "affin": [13, 22, 25, 28, 29], "affine_quantize_weight": [17, 18], "after": [3, 9, 13, 14, 17, 19, 27, 29], "again": 13, "against": 17, "aggreg": [21, 28], "aka": 13, "al": 14, "algebra": 14, "algorithm": [13, 14, 17, 19, 21, 22, 27, 28, 29], "align": [13, 19, 23], "align_corn": 13, "align_corners_": 19, "align_corners_fals": 19, "align_corners_tru": 19, "align_endpoints_mod": 19, "all": [0, 1, 3, 6, 9, 11, 12, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "allow": [0, 3, 12, 13, 14, 17, 19, 24, 27, 28, 29], "allow_set": 19, "allowed_rang": 17, "allowed_set": [17, 19], "allowed_valu": 17, "along": [13, 14, 17, 19, 21, 23, 27, 28, 29], "alongsid": [21, 22], "alpha": [13, 14, 19], "alpha_i": 13, "alreadi": [13, 19, 21, 22, 23, 24, 27, 28, 29], "also": [3, 9, 13, 14, 17, 19, 22, 24, 25, 28, 29], "alter": [19, 22, 24], "although": [13, 14], "alwai": [13, 14, 17, 29], "ambiti": 14, "amen": [2, 3], "amount": [13, 19], "an": [0, 2, 3, 9, 10, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29], "ani": [0, 2, 3, 9, 13, 14, 17, 19, 22, 24, 25, 27, 28, 29], "annot": 19, "anoth": [13, 14, 22, 25], "anywher": [14, 28, 29], "ao": 29, "api": [9, 14, 22, 25, 27], "appear": 13, "append": [13, 14, 17, 19, 22, 25], "appendix": 14, "appli": [0, 2, 3, 9, 13, 14, 17, 19, 23, 25, 27, 28, 29], "applic": [13, 14, 19, 29], "apply_pipelin": 14, "appropri": [27, 29], "approxim": [13, 14, 17, 27], "ar": [2, 3, 9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "arang": 13, "arbitrari": 29, "arbitrarili": 29, "arc": 19, "area": [17, 19], "aren": 27, "arg": [14, 21, 22, 23, 24, 25, 29], "argmax": [0, 2, 3, 13, 19], "argmaxlayerparam": 19, "argmin": [13, 19], "argminlayerparam": 19, "argsort": [13, 19], "argsortlayerparam": 19, "argument": [9, 13, 14, 17, 19, 28, 29], "arithmet": [14, 29], "around": 17, "arrai": [9, 13, 15, 17, 19, 21, 22, 25, 28], "arrang": 14, "array_shap": 19, "arriv": 14, "artifact": 17, "as_default": 9, "as_dict": [27, 28, 29], "ascend": 13, "asin": [13, 19], "asinh": 19, "asinhlayerparam": 19, "asinlayerparam": 19, "assert_allclos": [2, 3], "asset": 17, "assign": 13, "associ": 14, "assum": [12, 13, 14, 15, 17, 19, 21], "astyp": 17, "asymmetr": 19, "atan": [13, 19], "atanh": [13, 19], "atanhlayerparam": 19, "atanlayerparam": 19, "atol": [2, 3], "attach": [3, 19, 28], "attent": [13, 27], "attn_mask": 13, "attn_weight": 13, "attribut": [13, 14, 19, 22, 25, 29], "author": 17, "auto": [9, 13, 24, 27], "auto_set_specification_vers": 17, "autogener": 12, "autograd": 27, "automat": [9, 14, 17, 19, 21], "av": 19, "avail": [9, 13, 14, 17, 19, 21, 27], "availabletarget": 11, "averag": [0, 2, 3, 13, 19], "average_pooling_count_excludes_pad": 19, "avg": 19, "avg_pool": 13, "avoid": [9, 13, 14, 19], "awar": [27, 29], "ax": [13, 14, 19], "axi": [13, 14, 19, 21, 23, 24, 27, 29], "axis_update_op": 14, "b": [13, 14, 15, 17, 19, 22, 24], "b0": 13, "b1": 13, "b2": 13, "b3": 13, "b_": 13, "b_back": 19, "b_f": 19, "b_i": 19, "b_o": 19, "b_r": 19, "b_z": 19, 
"back": [14, 19, 22, 25], "backend": [12, 13, 14], "backward": [0, 2, 3, 13, 19, 27, 28, 29], "band": [13, 19], "band_part": 13, "bandwidth": 29, "base": [13, 14, 17, 19, 21, 22, 24, 25, 27, 28, 29], "basic": [12, 13], "batch": [2, 13, 17, 19, 27, 29], "batch_axi": [13, 19], "batch_dim": 13, "batch_idx": [0, 2, 3], "batch_mod": 27, "batch_norm": [13, 14], "batch_predict": 17, "batch_siz": [0, 2, 3], "batch_to_spac": 13, "batchedmatmullayerparam": 19, "batchnorm": [9, 14, 19], "batchnorm2d": 0, "batchnormlayerparam": 19, "bath": [15, 17], "bathroom": 17, "bchw": 14, "becaus": [2, 13, 14, 17, 21, 29], "becom": [13, 14, 23], "bedroom": [15, 17], "been": [13, 17, 19], "befor": [3, 9, 13, 14, 19, 21, 27, 28, 29], "begin": [2, 3, 13, 14, 19, 27, 28, 29], "begin_id": 19, "begin_mask": [13, 19], "begin_step": 28, "behav": [17, 19], "behavior": [9, 13, 17, 19, 28, 29], "behaviour": [13, 14], "being": [0, 2, 3, 14, 17, 19, 21, 27], "below": [9, 13, 17, 19, 29], "benefici": 28, "benefit": 13, "bernoulli": [13, 19], "best": [24, 27], "beta": [13, 19], "beta1": 19, "beta2": 19, "beta_i": 13, "better": 14, "between": [3, 9, 13, 14, 15, 17, 19, 22, 23, 24, 27, 28, 29], "beyond": [13, 27], "bf16": 27, "bgr": [12, 19], "bhwc": 14, "bi": [13, 19], "bia": [12, 13, 14, 19, 22, 24, 25], "bias": [13, 19], "bias_back": 13, "bias_k": 14, "bias_q": 14, "biaslayerparam": 19, "bidirect": 13, "bidirectin": 13, "bidirectionallstmlayerparam": 19, "big": 23, "bigger": [21, 22, 23, 25], "bilinear": [13, 19], "bin": [17, 21], "binari": [13, 14, 17, 19], "bipartit": 14, "bipartitl": 14, "bisect_model": 17, "bit": [0, 13, 16, 19, 21, 22, 23, 24, 25, 27, 28, 29], "bit_mask": [22, 23], "bitmask": 13, "blank": 19, "bloat": 14, "blob": [12, 17, 19], "block": [11, 13, 14, 19, 23, 24, 27, 28, 29], "block0": 14, "block5": 14, "block6": 14, "block_shap": 13, "block_siz": [13, 19, 23, 24, 28, 29], "blockwis": 29, "blockwise_palettize_weight": [22, 24], "blue": [12, 19], "blue_bia": 19, "bn1": 0, "bodi": [13, 19], "body_network": 19, "bodynetwork": 19, "bool": [9, 12, 13, 14, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "boolean": [9, 13, 19], "boost": 15, "booster": 16, "border": 13, "border_mod": 19, "border_mor": 19, "both": [9, 13, 14, 17, 19, 22, 24, 28], "bottom": [13, 19], "bottom_right_heavi": 19, "bound": [12, 13, 19], "boundari": 13, "box": [13, 19], "box_coordinate_mod": 13, "box_height": [13, 19], "box_index_t": 13, "box_indic": 13, "box_indices_mod": 19, "box_t": 13, "box_width": [13, 19], "branch": [13, 14, 17, 19], "branch_mod": 17, "branchlayerparam": 19, "branchonvalueequ": 17, "branchonvaluegreaterthan": 17, "branchonvaluegreaterthanequ": 17, "branchonvaluelessthan": 17, "branchonvaluelessthanequ": 17, "branchonvaluenotequ": 17, "breviti": 13, "broacas": 14, "broadcast": [13, 14, 19], "broadcast_to_dynam": 19, "broadcast_to_lik": 19, "broadcast_to_stat": 19, "broadcasttodynamiclayerparam": 19, "broadcasttolikelayerparam": 19, "broadcasttostaticlayerparam": 19, "broken": 14, "bsd": 17, "buffer": [9, 27], "build": [14, 17, 19], "build_nested_block": 13, "builder": [5, 17], "built": 19, "bundl": 17, "button": 14, "byte": [13, 19, 21, 22], "c": [10, 13, 14, 15, 19], "c_": 13, "c_i": 28, "c_in": [13, 24], "c_input": 19, "c_o": 28, "c_out": [13, 24], "c_output": 19, "c_reverse_input": 19, "c_reverse_output": 19, "c_t": 13, "cach": [9, 28, 29], "cacher": [28, 29], "calcul": [13, 19, 21, 27], "calibr": [24, 27, 28, 29], "calibration_nsampl": [27, 28, 29], "call": [0, 2, 3, 9, 13, 17, 22, 24, 27, 28], "callabl": [19, 21, 25, 
27, 28, 29], "can": [0, 2, 3, 9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 25, 27, 28, 29], "cancel": 14, "candid": 14, "cannot": [9, 13, 14, 17, 29], "canon": 14, "canonic": 14, "canonicalize_inplace_pattern": 14, "cap": 14, "captur": 29, "carri": [27, 28, 29], "case": [2, 9, 13, 14, 15, 17, 19, 21, 22, 24, 27], "cast": [9, 13, 14, 22, 24], "cast_optim": 14, "cast_to_8_bit_integ": [22, 24], "castop": 14, "castop_1": 14, "categor": [13, 14, 17, 19], "categori": [14, 19], "categorical_distribut": 19, "categoricaldistributionlayerparam": 19, "categror": 14, "caus": [9, 13, 14], "cce": 19, "ceil": [13, 19], "ceil_mod": 13, "ceillayerparam": 19, "cell": [13, 19], "cell_activ": 13, "cell_clip_threshold": 19, "cell_gat": 13, "cell_state_update_activ": 19, "center": [13, 19], "center_size_height_first": [13, 19], "center_size_width_first": [13, 19], "central": 19, "centroid": [13, 21, 27], "certain": [2, 19, 21, 28], "chain": 14, "chang": [2, 3, 13, 14, 23, 27, 28], "channel": [12, 13, 14, 19, 21, 22, 23, 24, 27, 28, 29], "channel_axi": [21, 27], "channel_first": [12, 14, 19], "channel_last": [14, 19], "channel_multipli": 19, "channelwise_palettize_weight": [21, 22], "character": [19, 24], "check": [13, 14, 17, 29], "check_output_correct": 17, "child": [14, 17, 22, 25, 28, 29], "child_op": [22, 25], "children": 14, "choos": [2, 13, 22, 24], "chosen": [13, 15, 27, 28, 29], "chr": 13, "chunk": [17, 23], "chunk1": 17, "chunk2": 17, "chunked_pipelin": 17, "chw": [13, 19], "cin": 24, "clamp": [13, 19, 27], "clamped_relu": 13, "clampedrelulayerparam": 19, "class": [2, 3, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 27, 28, 29], "class_label": [12, 16, 17, 19], "classif": [16, 17], "classifi": [9, 12, 14, 15, 16, 17, 19], "classification_softmax": 17, "classifier_config": 9, "classifierconfig": [9, 13], "classlabel": [12, 15, 19], "classmethod": [25, 27, 28, 29], "classprob": 17, "classscor": 15, "classt": 13, "clean": 14, "cleanup": 9, "click": 14, "clip": [13, 19, 22, 24], "cliplayerparam": 19, "close": 29, "closer": [14, 27], "closest": 27, "cluster": [0, 19, 21, 27], "cluster_dim": [21, 27], "cluster_dtyp": 27, "cluster_permut": 27, "co": [13, 19], "code": [0, 1, 2, 3, 9, 12, 14, 17, 27], "col": 13, "collect": [0, 2, 3, 21, 28, 29], "color": 12, "color_layout": [9, 12], "colorlayout": [9, 12], "column": [10, 13, 17, 29], "come": 13, "comma": 14, "commit": 28, "common": [9, 14], "compar": [0, 13, 14, 17, 19, 22, 24, 28], "compare_model": 19, "comparison": [17, 19], "compat": [9, 12, 13, 19, 24], "compil": [9, 13, 14], "compile_model": 17, "compiled_model_path": 17, "compiledmlmodel": 17, "complet": 13, "complex": [2, 3], "complic": 14, "compliment": 14, "compon": 14, "compos": [0, 2, 3, 14], "compose_conv1d": 14, "composit": 14, "compress": [9, 13, 14, 19, 20, 21, 23, 24, 25, 27, 28, 29], "compressed_model": [21, 22, 23, 24, 27, 28, 29], "compressed_model_a8": 24, "compressed_model_w8a8": 24, "compression_util": 18, "compressiongranular": [21, 24], "compressor": [27, 28, 29], "comput": [9, 13, 14, 17, 19, 21, 22, 23, 24, 27, 28, 29], "compute_mean_var": 19, "compute_precis": 9, "compute_spars": 28, "compute_unit": [9, 17], "computeunit": [9, 17], "concat": [13, 14, 19], "concat_nd": 19, "concat_to_pixel_shuffl": 14, "concaten": [13, 19], "concatndlayerparam": 19, "concret": [9, 13, 14], "cond": [13, 14], "cond_var": 14, "condit": [13, 14, 17, 19], "condition_network": 19, "conduct": 29, "config": [0, 2, 3, 21, 22, 23, 24, 25, 27, 28, 29], "config_dict": [25, 27, 28, 29], "config_typ": 
25, "configur": [3, 9, 12, 25, 27, 28, 29], "conjunct": 27, "connect": 14, "consecut": [14, 27], "conserv": 14, "consid": [12, 13, 14, 21, 22], "consist": [2, 3, 14, 17], "const": [9, 13, 14, 21, 22, 23, 24, 25], "const_2": 14, "const_3": 14, "const_dedupl": 14, "const_elimin": [9, 14], "const_threshold": 14, "constant": [13, 14, 17, 19, 22, 23, 25, 28], "constant_elimin": 9, "constant_v": [13, 14], "constant_valu": 19, "constantpaddinglayerparam": 19, "constantsparsityschedul": 28, "constexpr": [14, 22, 25], "constexpr_": 14, "constexpr_affine_dequant": [13, 14, 22, 24, 25], "constexpr_blockwise_shift_scal": [13, 22, 24], "constexpr_cast": 13, "constexpr_lut_to_dens": [13, 21, 22, 24, 25], "constexpr_lut_to_spars": [13, 21, 22], "constexpr_sparse_blockwise_shift_scal": [13, 22, 23], "constexpr_sparse_to_dens": [13, 21, 22, 23, 25], "constraint": [13, 14, 29], "construct": [3, 11, 13, 17, 19, 21, 22, 25], "constructor": 17, "constsexpr_sparse_to_dens": 23, "consum": 14, "consumpt": [20, 28], "contain": [9, 13, 14, 17, 19, 21, 22, 23, 25, 27, 28, 29], "content": 13, "context_length": 17, "contigu": [15, 28, 29], "continu": [13, 19, 21], "contribut": 14, "control": [9, 13, 19, 24, 25, 27, 28, 29], "conv": [2, 3, 9, 14, 22, 23, 24, 25, 27, 28, 29], "conv1": [0, 28, 29], "conv1d": 14, "conv2": [0, 27, 28, 29], "conv2d": [0, 2, 3, 14, 27, 28, 29], "conv_1": [22, 25], "conv_1_bia": [22, 25], "conv_1_weight": [22, 25], "conv_config": [3, 25], "conv_in": 13, "conv_out": 13, "conv_transpos": [13, 14, 22, 24], "convent": 13, "convers": [9, 12, 14, 17, 19], "convert": [0, 2, 3, 5, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 29], "convert_double_to_float_multiarray_typ": 17, "convert_to": [9, 17], "convolut": [2, 3, 13, 14, 19, 25, 29], "convolution3dlayerparam": 19, "convolutionlayerparam": 19, "convolv": 13, "coordin": [13, 19], "coordinates_mod": 13, "copi": [13, 17, 19, 27, 28, 29], "copylayerparam": 19, "core": [0, 2, 3, 5, 9, 12, 14, 15, 16, 17, 19], "coreml": [13, 16, 17, 18, 21, 22, 23, 24, 25], "coreml8": 14, "coreml_dialect": 13, "coreml_model": [0, 2, 3, 15, 16], "coreml_update_st": 14, "coremlopmetadata": [22, 25], "coremlpython": 17, "coremltool": [0, 2, 3, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 27, 28, 29], "coremlweightmetadata": [22, 25], "corner": 13, "corners_height_first": [13, 19], "corners_width_first": [13, 19], "correct": [0, 2, 3, 14, 17], "correspond": [3, 9, 13, 14, 15, 17, 19, 22, 24, 25, 27, 28, 29], "correspondingli": 14, "cosh": 13, "coshlayerparam": 19, "cosin": [13, 19], "coslayerparam": 19, "cost": 13, "could": [13, 14, 21, 22, 23, 24], "count": [13, 14, 17, 28], "counter": 28, "counterpart": 29, "coupl": 19, "coupled_input_forget_g": 19, "cours": [3, 28], "cout": [14, 24], "cpu": [9, 17, 21, 27, 28], "cpu_and_gpu": [9, 17], "cpu_and_n": [9, 17], "cpu_onli": [9, 17], "creat": [2, 3, 9, 11, 12, 13, 14, 17, 19, 25, 27, 28, 29], "create_array_feature_extractor": 17, "create_feature_vector": 17, "create_model_loss_and_optim": 27, "criteria": 27, "criterion": 19, "crop": [13, 19], "crop_and_res": 13, "crop_height": 13, "crop_res": 13, "crop_width": 13, "croplayerparam": 19, "cropresizelayerparam": 19, "cross": 19, "csv": [15, 17], "ct": [0, 2, 3, 9, 11, 12, 14, 17, 21, 22, 23, 24, 25], "cto": [21, 22, 23, 24], "cum": 19, "cumsum": [13, 19], "cumsumlayerparam": 19, "cumul": [13, 19], "current": [13, 14, 15, 17, 19, 22, 24, 27, 28, 29], "current_nam": 17, "curv": 3, "custom": [9, 13, 19, 21, 27], "custom_config": 27, "custom_lut": 19, 
"custom_padding_back": 19, "custom_padding_bottom": 19, "custom_padding_front": 19, "custom_padding_left": 19, "custom_padding_right": 19, "custom_padding_top": 19, "custom_proto_spec": 19, "customlayerparam": 19, "d": [13, 19, 21, 27], "d0": 13, "d1": [13, 14], "d2": [13, 14], "d3": [13, 14], "d4": 14, "d_axi": 13, "d_axis_i": 13, "d_axis_out": 13, "d_in": 13, "d_n": 13, "d_out": 13, "d_post": 13, "d_prior": 13, "dampen": [28, 29], "data": [0, 2, 3, 13, 14, 15, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "data1": 19, "data2": 19, "data_and_predict": 17, "data_dict": [27, 28, 29], "data_dir": [0, 2, 3], "data_mask": 13, "data_path": [0, 2, 3], "data_point": 17, "datafram": 17, "dataload": [0, 2, 3, 27, 28, 29], "datalod": [28, 29], "dataset": 21, "datatyp": [15, 17, 19], "db": 17, "de": 13, "dead": 14, "dead_code_elimin": [9, 14], "deal": 17, "debug": [9, 14], "debugging_util": 17, "decai": 19, "decid": [14, 27], "decis": 15, "decision_tre": 16, "decod": [28, 29], "decomposit": 14, "decompress": [13, 14, 22, 25], "decompress_weight": [22, 25], "decompressed_model": [22, 25], "deconv": [14, 19], "deconvolut": [13, 19], "decor": [11, 14], "decreas": [13, 19], "dedup": 14, "dedup_op_and_var_nam": 14, "dedupl": [14, 17], "deep": 17, "deep_featur": 17, "def": [0, 2, 3, 9, 11, 13, 14, 17, 19, 21, 27], "default": [9, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "default_class_label": 17, "default_function_nam": 17, "default_label": 17, "default_palett": [0, 9], "default_prun": [3, 9], "default_valu": 12, "defaultfunctionnam": 17, "defin": [2, 3, 13, 17, 19, 27, 28, 29], "define_loss": [28, 29], "define_model": 29, "definit": 14, "degrad": [2, 27], "delet": [14, 17], "delta": 13, "demonstr": 3, "denomin": [13, 19], "denot": [13, 17], "dens": [0, 2, 3, 9, 12, 13, 21, 22, 23, 24, 27], "dense1": 0, "dense2": 0, "dense_2": 19, "depend": [3, 9, 13, 19, 22, 25], "deploi": [0, 2, 3, 20], "deploy": [9, 22, 24], "deprec": 18, "depth": [13, 19], "depth_to_spac": [13, 19], "depthwis": [13, 19], "depthwiseconv": 19, "dequant": [13, 14, 19, 24, 29], "dequantize_quantize_pair_elimin": 14, "dequantize_to_constexpr": 14, "deriv": [13, 19], "desc": 17, "descend": [13, 14, 19, 29], "descent": 19, "describ": [0, 11, 13, 14, 19, 27], "descript": [13, 17, 19, 22, 24], "descriptor": 17, "design": 17, "desir": [17, 28], "destin": [14, 17], "destination_path": 17, "detail": [2, 3, 9, 11, 13, 14, 19, 22, 25, 27, 28, 29], "detect": 14, "detect_concat_interleav": 14, "determin": [9, 13, 14, 15, 17, 19, 21, 24, 28], "deviat": [13, 19], "devic": [20, 23, 28, 29], "dfrac": 13, "diagon": [13, 19, 28, 29], "dialect": [9, 13], "dict": [13, 14, 15, 17, 19, 22, 24, 25, 27, 28, 29], "dictabledataclass": [27, 28, 29], "dictat": 13, "dictionari": [13, 14, 15, 17, 19, 22, 24, 25, 27, 28, 29], "dictvector": 15, "did": 14, "differ": [3, 9, 13, 14, 17, 19, 24, 25, 27, 28, 29], "differenti": [6, 26, 27], "dilat": [13, 19], "dilated_kernel": 13, "dilation_depth": 19, "dilation_factor": 19, "dilation_height": 19, "dilation_width": 19, "dim": [0, 2, 3, 12, 13, 14, 19, 23, 24, 28], "dimens": [13, 14, 15, 17, 19, 21, 22, 23, 24, 27, 28, 29], "dimension": [13, 14, 17, 19, 29], "dir": 17, "direct": [13, 14, 17, 19, 29], "directli": [9, 13, 17], "directori": [9, 17, 19], "disabl": [17, 28, 29], "disable_rank5_shape_map": 19, "discov": 21, "discret": [17, 21, 22], "disk": [0, 17], "displai": [9, 19, 21], "distanc": 27, "distinct": 19, "distribut": [13, 14, 19, 21, 27], "distributive_quantized_binary_op_scale_norm": 14, "div": [13, 14, 
19], "divid": [13, 14, 19, 21, 22, 27], "divide_broadcast": 19, "divide_to_multipli": 14, "dividebroadcastablelayerparam": 19, "divis": [13, 19, 23, 24, 28], "divisor": 14, "dkm": 27, "dkm_palett": [0, 6], "dkmpalett": [0, 27], "dkmpalettizerconfig": [0, 27], "dn": [13, 14], "do": [0, 13, 14, 17, 21, 25], "do_quant": 19, "doc": [22, 25], "docstr": 9, "document": [0, 2, 3, 5, 13, 14], "doe": [13, 14, 17], "doesn": [0, 13, 14, 17, 29], "domain": 13, "don": 17, "done": [9, 13], "dot": [13, 19], "doubl": [14, 15, 17, 19], "doubletyp": 17, "dout": [13, 14], "down": [14, 19], "downcast": 14, "download": [0, 1, 2, 3], "downscal": 13, "downscale_factor": 13, "downstream": 14, "draw": 13, "drawn": 13, "dropout": 0, "dropout_p": 13, "dstt": 13, "dtype": [9, 12, 13, 14, 18, 19, 22, 24, 25, 27, 28, 29], "due": [9, 13, 14], "dummi": [13, 14], "duplic": 14, "dure": [12, 14, 19, 20, 27, 29], "dynam": [13, 14, 17, 29], "dynamic_length": 13, "dynamic_shape_mlmodel": 17, "e": [13, 14, 17, 19, 28], "each": [0, 3, 9, 10, 11, 12, 13, 14, 15, 17, 19, 21, 22, 24, 27, 28, 29], "easier": [14, 17], "edg": [9, 13], "edkm": 27, "effect": [13, 14, 21, 24, 27, 29], "efficaci": 28, "effici": [22, 23, 27, 29], "einsum": 13, "either": [3, 9, 13, 14, 17, 19, 22, 23, 24, 27, 28, 29], "elem_shap": 13, "element": [0, 3, 9, 12, 13, 14, 19, 21, 22, 23, 24, 25, 27, 28, 29], "elementari": 13, "elementwis": [13, 14, 19], "elimin": 14, "els": [13, 14, 19, 21, 28], "else_branch": 19, "elu": [13, 19], "ema_min_max": 29, "ema_ms": 29, "ema_percentil": 29, "embed": [13, 15, 19], "embedding_s": 19, "embeddinglayerparam": 19, "embeddingndlayerparam": 19, "empti": [9, 13, 17, 19], "enabl": [17, 19, 23, 25, 27, 28, 29], "enable_normal_float": [28, 29], "enable_per_channel_scal": [21, 27], "encod": [15, 17, 19], "encompass": 28, "end": [0, 2, 3, 13, 14, 17, 19, 28], "end_id": 19, "end_index": [19, 28], "end_mask": [13, 19], "enforc": [13, 27], "enforce_zero": 27, "engin": [9, 17, 28], "enough": 14, "ensembl": 17, "ensur": [13, 14], "entir": [13, 14, 17, 19, 21], "entri": [13, 17, 21, 22, 27], "entropi": 19, "enum": [9, 11, 17, 19, 29], "enumer": [0, 2, 3, 9, 12, 17, 19, 21], "enumerated_shap": 19, "ep": [0, 2, 3, 19], "epoch": [0, 2, 3, 27], "epsilon": [10, 13, 19], "eq": [0, 2, 3], "equal": [13, 17, 19, 23, 24, 27, 28, 29], "equallayerparam": 19, "equat": [13, 22, 24], "equival": [13, 14, 17, 19], "erf": [13, 14, 19], "erflayerparam": 19, "error": [13, 17, 19, 28, 29], "especi": 21, "essenti": 14, "estim": [19, 29], "et": 14, "etc": [9, 13, 14, 17, 24, 28], "ev": 13, "eval": [0, 2, 3, 9], "eval_model": [0, 2, 3], "evalu": [0, 2, 3, 13, 17, 19, 29], "evaluate_classifi": 17, "evaluate_classifier_with_prob": 17, "evaluate_regressor": 17, "evaluate_transform": 17, "even": [13, 14, 21, 29], "evenli": 19, "event": 13, "eventu": 13, "everi": [3, 9, 13, 14, 19, 23, 28], "everyth": [13, 17, 19], "evolv": 3, "exact": [13, 14, 19, 22, 24, 28, 29], "exact_array_map": 19, "exactli": [12, 13, 19, 23, 27], "exampl": [0, 1, 2, 3, 4, 5, 6, 9, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29], "example_input": [0, 2, 3, 9, 29], "exce": 27, "except": [9, 12, 13, 19], "exclud": [13, 19], "exclude_pad_area": 19, "exclude_padding_from_averag": 13, "exclus": [13, 19], "execut": [4, 6, 9, 13, 17, 19, 28, 29], "exist": [0, 2, 3, 13, 14, 17, 19, 27, 28, 29], "exp": [13, 19], "exp2": [13, 19], "exp2layerparam": 19, "expand": [13, 14, 17, 19], "expand_dim": [13, 14], "expand_dynamic_linear": 14, "expand_high_rank_reshape_and_transpos": 14, 
"expanddimslayerparam": 19, "expandus": [0, 2, 3], "expect": [13, 16, 17, 19, 24], "expected_output": 17, "experi": 17, "experienti": 19, "experiment": [14, 24], "explicitli": 13, "explor": [14, 28], "expon": [13, 19, 28], "exponenti": [13, 19], "export": [9, 17, 27, 28, 29], "exportedprogram": 9, "expos": [12, 15, 16, 17, 19], "express": 13, "extend": 28, "extens": [9, 27], "extra": [9, 13, 27], "extract": [13, 17, 19, 21], "extract_indic": 17, "extractor": 17, "extrapolation_valu": 13, "extrema": 13, "f": [0, 2, 3, 13, 19], "f16": 27, "f32": [13, 27], "f_t": 13, "fact": 27, "factor": [12, 13, 14, 19, 22, 24, 27, 28, 29], "fail": [9, 14], "fake": [2, 29], "fakepalett": 27, "fakequant": 29, "fall": [13, 21], "fals": [0, 2, 3, 9, 12, 13, 14, 17, 19, 21, 24, 25, 27, 28, 29], "false_child_id": 17, "fashion": [14, 28], "faster": 14, "fcrn": 14, "featur": [10, 12, 15, 16, 17, 19], "feature1": 19, "feature2": 19, "feature_index": 17, "feature_nam": [10, 16, 19], "feature_valu": 17, "fed": [12, 28, 29], "feed": [14, 21, 22, 24, 25, 28, 29], "feet": 17, "fetch": [13, 27, 28, 29], "few": [2, 3, 14, 27, 28, 29], "field": [12, 13, 19], "file": [4, 6, 9, 12, 16, 17, 25, 27, 28, 29], "file_valu": 13, "filenam": 17, "fill": [13, 14, 19], "fill_dynam": 19, "fill_lik": [13, 19], "fill_stat": 19, "filldynamiclayerparam": 19, "filllikelayerparam": 19, "fillstaticlayerparam": 19, "filter": [13, 19], "final": [0, 13, 14, 15, 17, 19, 23, 27, 28, 29], "finalized_model": 0, "find": [3, 14], "fine": [23, 25, 27, 28], "finit": 12, "first": [0, 2, 3, 12, 13, 14, 17, 19, 21, 22, 23, 24, 28, 29], "firstlayerinputcach": [28, 29], "fisher": 27, "fisrt": 28, "fit": 15, "fix": [13, 14, 17, 21, 27], "flag": [9, 14, 17, 19], "flat": [9, 13], "flatten": [0, 2, 3, 13, 19, 21, 23], "flatten2d": 13, "flatten_to_2d": 19, "flattenlayerparam": 19, "flattento2dlayerparam": 19, "flexibl": 19, "flexible_shape_util": 17, "float": [9, 12, 13, 14, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "float16": [9, 12, 14, 19], "float32": [9, 12, 14, 17, 19, 22, 25, 28, 29], "floor": [13, 19, 23, 28], "floor_div": 13, "floor_div_broadcast": 19, "floordivbroadcastablelayerparam": 19, "floorlayerparam": 19, "flow": [19, 29], "fold": [2, 9, 13, 14, 27, 28], "follow": [2, 3, 9, 12, 13, 14, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "footprint": [0, 3], "forc": 19, "force_32bit_float": 16, "forest": 15, "forget": [13, 19], "forget_bia": 19, "forget_g": 13, "form": [0, 9, 14, 15, 17, 21, 27, 29], "format": [0, 2, 3, 5, 9, 12, 13, 14, 15, 16, 17, 19, 22, 23, 24, 25, 28, 29], "formula": [13, 19, 22, 24, 28], "forward": [3, 9, 13, 19, 28, 29], "found": [9, 19, 27], "four": [13, 17, 19, 21, 22, 25, 28, 29], "fourth": 13, "fp16": [12, 13, 14, 22, 24, 27], "fp16computeprecis": [9, 14], "fp32": [12, 13, 14], "frac": [13, 17, 28], "fraction": [13, 28], "fragment": 14, "framework": [9, 13, 14, 17], "freez": [3, 29], "freshli": 14, "from": [0, 2, 3, 4, 6, 9, 11, 12, 13, 14, 15, 16, 17, 19, 21, 22, 23, 25, 27, 28, 29], "from_dict": [0, 25, 27, 28, 29], "from_yaml": [25, 27, 28, 29], "front": 19, "frozen": [2, 9, 29], "full": [0, 2, 3, 19, 29], "full_precision_model": 19, "fulli": [24, 27, 28, 29], "func_input": 11, "function": [0, 2, 3, 9, 11, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "function_nam": [11, 17], "function_name_to_materialization_map": [14, 17], "further": [14, 21, 22, 23, 24, 27], "fuse": [9, 14], "fuse_conv_batchnorm": [9, 14], "fuse_conv_bia": 14, "fuse_conv_scal": 14, "fuse_elementwise_to_batchnorm": 14, "fuse_gelu_exact": 14, 
"fuse_gelu_tanh_approxim": 14, "fuse_layernorm_or_instancenorm": 14, "fuse_leaky_relu": 14, "fuse_linear_bia": 14, "fuse_matmul_weight_bia": 14, "fuse_onehot_matmul_to_gath": 14, "fuse_pad_conv": 14, "fuse_prelu": 14, "fuse_reduce_mean": 14, "fuse_transpose_matmul": 14, "fusion": [13, 14], "futur": [14, 18], "fx": 29, "g": [12, 13, 14, 17, 28], "gain": [23, 28], "galleri": [0, 1, 2, 3, 6], "gamma": [13, 19], "gamma_i": 13, "gate": [13, 19], "gather": [13, 14, 19], "gather_along_axi": [13, 19], "gather_nd": 13, "gatheralongaxislayerparam": 19, "gatherlayerparam": 19, "gatherndlayerparam": 19, "gauss": 13, "gaussian": [13, 19], "gelu": [13, 14, 19], "gelulayerparam": 19, "gener": [0, 1, 2, 3, 9, 12, 13, 14, 19, 21, 24, 28, 29], "generic_pass_infrastructur": 14, "get": [13, 14, 17, 21, 22, 23, 24, 25, 28, 29], "get_compiled_model_path": 17, "get_gelu_pattern1": 14, "get_gelu_pattern2": 14, "get_shap": 19, "get_spec": [17, 19], "get_submodul": [27, 28, 29], "get_weights_metadata": [22, 25], "getshapelayerparam": 19, "github": 5, "give": [13, 15, 17], "given": [9, 13, 14, 15, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "glmregressor": 17, "global": [19, 25, 27, 28, 29], "global_config": [2, 21, 22, 23, 24, 25, 27, 28, 29], "global_pool": 13, "globalconfigtyp": 27, "globalpooling3dlayerparam": 19, "go": [0, 2, 3, 13, 14, 17, 27], "got": 13, "gptq": 28, "gpu": [9, 17, 27, 29], "gradient": [13, 15, 19, 27], "gradual": 3, "grain": [23, 25, 28], "granular": [21, 24, 27, 28, 29], "graph": [5, 9, 12, 13, 21, 22, 23, 24], "graphdef": 9, "graphic": 14, "graphmodul": 29, "gray_bia": 19, "grayscal": [12, 19], "grayscale_float16": [9, 12], "greater": [13, 14, 19, 21, 22, 23, 24, 25, 28], "greater_equ": 13, "greater_than": 19, "greaterequallayerparam": 19, "greaterthanlayerparam": 19, "green": [12, 19], "green_bia": 19, "grid": [13, 19], "grid_point": [13, 19], "group": [13, 19, 21, 27], "group_siz": [21, 27], "gru": [13, 19], "grulayerparam": 19, "gsm": 27, "guarante": [13, 14], "guid": 5, "h": [9, 13, 14, 19, 28], "h1": [13, 14], "h2": [13, 14], "h5": 9, "h5_path": 9, "h_": 13, "h_center": [13, 19], "h_end": [13, 19], "h_in": [13, 19], "h_input": 19, "h_output": 19, "h_reverse_input": 19, "h_reverse_output": 19, "h_start": [13, 19], "h_t": 13, "ha": [3, 9, 13, 14, 17, 19, 21, 22, 24, 25, 27, 29], "had": 3, "half": [13, 19, 28], "half_pixel_cent": 13, "hand": 14, "handl": [14, 15, 19], "happen": [14, 21], "hard": [19, 27], "hard_sigmoid": 13, "hardshrink": 27, "hardwar": 29, "has_bia": 19, "hasn": 17, "have": [0, 3, 9, 12, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "hdf5": 9, "height": [9, 13, 19], "height_rang": 19, "help": [0, 19, 23], "henc": [13, 14, 21, 22, 23, 24, 25, 28], "here": [2, 13, 14, 17, 24, 28], "hessian": [27, 28, 29], "hessian_dampen": [28, 29], "hf": 13, "hh": 13, "hi": 13, "hidden": [13, 19], "hidden_s": 19, "high": [13, 17, 22, 24, 28, 29], "higher": [13, 27], "highest": 17, "highlight": 14, "hint": 17, "histogram": [21, 22], "hit": 17, "hn": 14, "ho": 13, "hold": [13, 17, 19], "hook": [3, 27, 28], "horizont": 19, "hot": [13, 15, 19], "hous": [15, 17], "housepric": [15, 17], "how": [0, 2, 3, 13, 17, 21, 24, 27, 28, 29], "howev": [14, 29], "hr": 13, "huge": 27, "huggingfac": [28, 29], "hw": 19, "hyper": 28, "hyperbol": [13, 19], "hyperparamet": [3, 29], "hypothet": 14, "hz": 13, "i": [0, 2, 3, 5, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24, 25, 27, 28, 29], "i0": 13, "i1": 13, "i16": 13, "i2": 13, "i3": 13, "i32": [13, 14], "i4": 13, "i64": 13, "i8": [13, 27], "i_": 13, 
"i_0": 13, "i_t": 13, "id": [13, 17, 19], "idea": 28, "ident": [9, 13, 14, 15], "identifi": [14, 21], "idx": [13, 21, 22, 25], "if_branch": 19, "iff": 13, "ignor": [13, 14, 17, 19, 28], "ih": 13, "ii": 13, "illeg": 13, "illustr": [13, 14, 22, 24, 29], "imag": [0, 2, 3, 9, 12, 13, 14, 19], "image_format": 19, "image_input_nam": 19, "image_input_preprocess": 14, "image_s": 19, "image_scal": 19, "imagetyp": [9, 14], "img_size_rang": 19, "immedi": [14, 28, 29], "immediate_valu": 13, "impact": 27, "implement": [13, 14, 19, 21, 27, 28, 29], "impli": [12, 13], "import": [0, 2, 3, 9, 11, 15, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "impos": 29, "improv": 29, "imput": 15, "in1": 13, "in2": 13, "in_plac": 29, "includ": [9, 13, 14, 17, 19, 22, 25], "include_last_pixel": 19, "include_layers_with_nam": 19, "inclus": [13, 19], "increas": [3, 13, 19], "increment": [13, 17, 19, 28], "independ": 13, "index": [2, 5, 12, 13, 17, 19, 27, 28], "index_typ": 17, "indic": [13, 14, 15, 17, 19, 21, 22, 23, 27, 28, 29], "indices_mask": 13, "indices_nonzero_data": 13, "indices_shap": 13, "indicest": 13, "induc": [23, 28, 29], "inertia": 27, "inf": [13, 14], "infer": [2, 9, 12, 13, 14, 17, 20, 24, 28, 29], "infin": 13, "info": [13, 17], "inform": [11, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28], "inherit": [13, 19], "init_length": 13, "initi": [0, 2, 3, 12, 13, 17, 27, 28, 29], "initial_c": 13, "initial_h": 13, "initial_spars": 28, "inject": [9, 14], "inner": 19, "inner_activ": 19, "innermost": 13, "innerproduct": 19, "innerproductlayerparam": 19, "inp": 27, "inplac": [3, 14, 27, 28, 29], "input": [0, 2, 3, 5, 9, 11, 13, 14, 15, 16, 17, 19, 21, 22, 23, 24, 27, 28, 29], "input_": 17, "input_1": 17, "input_2": 17, "input_cach": [28, 29], "input_channel": 19, "input_channel_block_s": 24, "input_data": 17, "input_descript": 17, "input_dim": 19, "input_featur": [15, 17, 19], "input_g": 13, "input_height": 19, "input_id": 17, "input_idx": 19, "input_interv": 13, "input_length": 10, "input_nam": [9, 17, 19], "input_rang": 19, "input_rank": 19, "input_s": 19, "input_shap": [13, 19], "input_spec": 11, "input_tensor": 19, "input_typ": 12, "input_width": 19, "insensit": 19, "insert": [0, 3, 13, 14, 17, 24, 27, 28, 29], "insert_pass": 14, "insid": [13, 17, 19, 27], "inspect": [9, 17, 19], "inspect_conv_channel": 19, "inspect_innerproduct_channel": 19, "inspect_input_featur": 19, "inspect_lay": 19, "inspect_loss_lay": 19, "inspect_optim": 19, "inspect_output_featur": 19, "inspect_updatable_lay": 19, "inspir": 28, "instal": [2, 5, 11], "instanc": [2, 3, 13, 15, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "instance_norm": [13, 14, 19], "instanti": 17, "instead": [12, 13, 14, 19], "instruct": [5, 14], "int": [12, 14, 16, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "int16": [9, 13], "int3": [28, 29], "int32": [9, 12, 13, 14], "int4": [13, 22, 24, 28, 29], "int64": [15, 17], "int8": [13, 14, 18, 19, 22, 24, 27, 29], "int_8_dynamic_quant": 19, "int_op_canonic": 14, "integ": [13, 14, 17, 19, 21, 22, 23, 24, 28, 29], "intend": [9, 13, 14], "intent": 14, "interest": [13, 19], "interfac": [14, 16, 17, 19], "interleav": [13, 14, 19], "intermedi": [9, 12, 13, 14, 22, 24, 29], "intern": [13, 16, 17, 28], "interpol": [13, 19, 22, 24], "interpolation_mod": 13, "interpret": [13, 15, 17, 19], "intersect": [13, 19], "interv": [13, 19, 21], "introduc": [3, 13, 14], "invalid": [9, 13, 19], "invari": 14, "invers": [13, 19], "inverse_dist": 17, "invok": [9, 13, 14], "involv": [13, 14], "io": [2, 19, 25, 27, 28, 29], "ios13": 9, "ios14": 9, "ios15": 
[9, 13], "ios16": [0, 3, 11, 13], "ios17": [2, 13], "ios18": [9, 13], "iou": 13, "iou_threshold": [13, 19], "ip_lay": 19, "iphon": 20, "ipynb": [0, 2, 3], "ir": 13, "is0": [13, 14], "is1": 14, "is_bgr": 19, "is_caus": 13, "is_deconv": 19, "is_deprec": 25, "is_glob": 19, "is_logit": 19, "is_quantized_weight": 19, "is_temp_packag": 17, "is_updat": 17, "isflex": 19, "isn": 14, "issu": [9, 14], "item": [0, 2, 3, 22, 25, 28, 29], "iter": [9, 13, 14, 19, 21, 22, 27, 28, 29], "its": [0, 13, 14, 17, 19, 21, 22, 23, 24, 25, 27, 28, 29], "itself": [12, 17], "iz": 13, "j": 13, "j0": 13, "jit": [0, 2, 3, 9], "joint": [21, 22, 23, 24, 27], "joint_compress": [21, 22, 23, 24], "jointli": [21, 22, 23, 24], "jupyt": [0, 1, 2, 3], "just": [14, 16], "k": [6, 13, 14, 19, 21, 22, 25, 26, 27], "k0": 13, "k1": 13, "k2": 13, "k_embed": 14, "kd": 17, "kd_tree": 17, "keep": [13, 14, 19, 28], "keep_dim": 13, "keepdim": [0, 2, 3, 19], "kei": [9, 12, 13, 15, 17, 19, 25, 27, 28, 29], "kera": [9, 14], "keras_model": 9, "kernel": [13, 14, 19], "kernel_channel": 19, "kernel_depth": 19, "kernel_height": 19, "kernel_s": 13, "kernel_width": 19, "key_1": 29, "key_2": 29, "keyword": [19, 29], "kh": [13, 24], "kind": 28, "kmean": [18, 21, 22, 25, 27], "kmeans_batch_threshold": 27, "kmeans_error_bnd": 27, "kmeans_init": 27, "kmeans_lut": 19, "kmeans_max_it": 27, "kmeans_n_init": 27, "kmeans_opt1d_threshold": 27, "knearestneighborsclassifi": 17, "knearestneighborsclassifierbuild": 17, "kneighborsclassifi": 15, "knnclassifi": 17, "know": 13, "known": [9, 13, 14, 19], "known_size_map": 17, "kv": 9, "kw": [13, 24], "kwarg": [13, 17, 19, 21, 22, 23, 24, 25], "l": 13, "l1": [13, 19], "l1_normal": 19, "l2": [13, 19, 23, 28, 29], "l2_norm": 13, "l2_normal": 19, "l2_pool": 13, "l2normalizelayerparam": 19, "label": [12, 13, 16, 17, 19, 27, 28, 29], "laid": 13, "laina": 14, "lambda": 9, "languag": [9, 12, 13, 14, 27, 28, 29], "larg": [9, 13, 14, 22, 23, 27], "large_weight": [22, 25], "larger": [13, 14, 27, 28], "largest": 19, "last": [12, 13, 14, 15, 17, 19, 28, 29], "latenc": [20, 23, 28, 29], "later": [9, 13, 14, 19], "lattic": 21, "layer": [0, 3, 9, 12, 13, 14, 19, 22, 23, 24, 25, 27, 28, 29], "layer_nam": 19, "layer_norm": [13, 14], "layer_typ": 19, "layernormalizationlayerparam": 19, "layerwise_compress": [28, 29], "layerwisecompressionalgorithmconfig": [28, 29], "layerwisecompressor": [28, 29], "layerwisecompressorconfig": [27, 28, 29], "layout": [12, 13], "lead": [2, 9, 13, 14, 27], "leaf": 17, "leaf_siz": 17, "leakag": 14, "leaki": 19, "leaky_relu": [13, 14], "leakyrelu": 19, "learn": [0, 2, 3, 9, 15, 19, 23, 27, 28, 29], "learnt": 27, "least": [13, 19], "leav": [17, 19], "left": [13, 14, 19], "leftarrow": 13, "len": [0, 2, 3, 13], "length": [9, 13, 15, 17, 19, 21, 27, 28, 29], "less": [2, 13, 19, 20, 27, 28], "less_equ": 13, "less_than": 19, "lessequallayerparam": 19, "lessthanl_ayerparam": 19, "let": [14, 17, 19], "level": [3, 17, 23, 25, 27, 28, 29], "leverag": 27, "libsvm": 8, "libsvm_model": 10, "licens": 17, "lifetim": 17, "like": [2, 9, 13, 14, 17, 19, 27, 29], "likewis": 13, "limit": [9, 12, 13, 14, 17, 19, 29], "linear": [0, 1, 3, 6, 9, 14, 15, 17, 19, 21, 22, 23, 24, 25, 26, 27, 28, 29], "linear_0": 14, "linear_1": [22, 25], "linear_1_weight": [22, 25], "linear_activ": 13, "linear_config": 3, "linear_lut": 19, "linear_model": 15, "linear_quant": [2, 6], "linear_quantize_activ": 24, "linear_quantize_weight": [18, 22, 24], "linear_symmetr": [18, 19, 22, 24], "linear_upsample_mod": 19, "linearli": [21, 22], "linearquant": 
[remainder of the auto-generated searchindex.js term map omitted]
diff --git a/docs/_build/html/_downloads/11f835b4614af54296e27c778e9687b8/dkm_palettization.py b/docs/_downloads/11f835b4614af54296e27c778e9687b8/dkm_palettization.py
similarity index 100%
rename from docs/_build/html/_downloads/11f835b4614af54296e27c778e9687b8/dkm_palettization.py
rename to docs/_downloads/11f835b4614af54296e27c778e9687b8/dkm_palettization.py
diff --git a/docs/_build/html/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb b/docs/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb
similarity index 99%
rename from docs/_build/html/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb
rename to docs/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb
index a75d76282..b4c9f29d4 100644
--- a/docs/_build/html/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb
+++ b/docs/_downloads/122dbbcfab949f2af9f9aa0f903434d3/magnitude_pruning.ipynb
@@ -229,7 +229,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.14"
+   "version": "3.10.15"
   }
  },
  "nbformat": 4,
diff --git a/docs/_build/html/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip b/docs/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip
similarity index 98%
rename from docs/_build/html/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip
rename to docs/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip
index 37d7a0b60..3786c860e 100644
Binary files a/docs/_build/html/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip and b/docs/_downloads/324c76d72de4ccd8db63a09d4b2f0f27/_examples_python.zip differ
diff --git a/docs/_build/html/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip b/docs/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip
similarity index 98%
rename from docs/_build/html/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip
rename to docs/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip
index 73fc9218b..96da46ab0 100644
Binary files a/docs/_build/html/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip and b/docs/_downloads/3c0d8a29b2f057cd52c92d618498f1b8/_examples_jupyter.zip differ
diff --git a/docs/_build/html/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip b/docs/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip
similarity index 98%
rename from docs/_build/html/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip
rename to docs/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip
index 14c994215..fa5a1be9e 100644
Binary files a/docs/_build/html/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip and b/docs/_downloads/538c03374b600fb7b4c95773e0e48cc8/linear_quantization.zip differ
diff --git a/docs/_build/html/_downloads/7772d27e9f0aa23fca60d8f87774b59e/magnitude_pruning.py b/docs/_downloads/7772d27e9f0aa23fca60d8f87774b59e/magnitude_pruning.py
similarity index 100%
rename from docs/_build/html/_downloads/7772d27e9f0aa23fca60d8f87774b59e/magnitude_pruning.py
rename to docs/_downloads/7772d27e9f0aa23fca60d8f87774b59e/magnitude_pruning.py
diff --git a/docs/_build/html/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip b/docs/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip
similarity index 98%
rename from docs/_build/html/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip
rename to docs/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip
index 1f074f966..ffc488db5 100644
Binary files a/docs/_build/html/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip and b/docs/_downloads/8b4a48b882e18960d6cbb43608d396d6/dkm_palettization.zip differ
diff --git a/docs/_build/html/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip b/docs/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip
similarity index 98%
rename from docs/_build/html/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip
rename to docs/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip
index 08b422ad0..04c06e901 100644
Binary files a/docs/_build/html/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip and b/docs/_downloads/a7b4033a3801c85fde7f868c7e61d373/magnitude_pruning.zip differ
diff --git a/docs/_build/html/_downloads/bc2a7018a863cd1cadb3fbeb26a90a66/linear_quantization.py b/docs/_downloads/bc2a7018a863cd1cadb3fbeb26a90a66/linear_quantization.py
similarity index 100%
rename from docs/_build/html/_downloads/bc2a7018a863cd1cadb3fbeb26a90a66/linear_quantization.py
rename to docs/_downloads/bc2a7018a863cd1cadb3fbeb26a90a66/linear_quantization.py
diff --git a/docs/_build/html/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb b/docs/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb
similarity index 99%
rename from docs/_build/html/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb
rename to docs/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb
index a90e93dad..7b6e51d1d 100644
--- a/docs/_build/html/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb
+++ b/docs/_downloads/bd67eae74db5a0960d9f9c236fcbbcc3/dkm_palettization.ipynb
@@ -193,7 +193,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.14"
+   "version": "3.10.15"
   }
  },
  "nbformat": 4,
diff --git a/docs/_examples/linear_quantization.ipynb b/docs/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb
similarity index 99%
rename from docs/_examples/linear_quantization.ipynb
rename to docs/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb
index 6639773e5..aae9580ab 100644
--- a/docs/_examples/linear_quantization.ipynb
+++ b/docs/_downloads/c276a0975534ab93174d95e83f81c9e7/linear_quantization.ipynb
@@ -211,7 +211,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.14"
+   "version": "3.10.15"
   }
  },
  "nbformat": 4,
diff --git a/docs/_examples/_examples_jupyter.zip b/docs/_examples/_examples_jupyter.zip
deleted file mode 100644
index 73fc9218b..000000000
Binary files a/docs/_examples/_examples_jupyter.zip and /dev/null differ
diff --git a/docs/_examples/_examples_python.zip b/docs/_examples/_examples_python.zip
deleted file mode 100644
index 37d7a0b60..000000000
Binary files a/docs/_examples/_examples_python.zip and /dev/null differ
diff --git a/docs/_examples/dkm_palettization.codeobj.json b/docs/_examples/dkm_palettization.codeobj.json
deleted file mode 100644
index 36284b858..000000000
--- a/docs/_examples/dkm_palettization.codeobj.json
+++ /dev/null
@@ -1,290 +0,0 @@
[deleted body omitted: auto-generated code-object cross-reference metadata for dkm_palettization.py]
diff --git a/docs/_build/html/_examples/dkm_palettization.html b/docs/_examples/dkm_palettization.html
similarity index 98%
rename from docs/_build/html/_examples/dkm_palettization.html
rename to docs/_examples/dkm_palettization.html
index 6d7d57165..ae60273b9 100644
--- a/docs/_build/html/_examples/dkm_palettization.html
+++ b/docs/_examples/dkm_palettization.html
@@ -1,12 +1,14 @@
-    Palettization Using Differentiable K-Means — coremltools API Reference 8.0b1 documentation
+    Palettization Using Differentiable K-Means — coremltools API Reference 8.1 documentation
[surrounding head markup lost in extraction]
@@ -15,16 +17,12 @@
[stylesheet and script tag changes lost in extraction]
@@ -43,9 +41,6 @@
[sidebar hunk markup lost in extraction: the "8.0b1" version label beneath "coremltools API Reference" is removed]
diff --git a/docs/_examples/dkm_palettization.ipynb b/docs/_examples/dkm_palettization.ipynb
deleted file mode 100644
index a90e93dad..000000000
--- a/docs/_examples/dkm_palettization.ipynb
+++ /dev/null
@@ -1,201 +0,0 @@
[deleted notebook body omitted: auto-generated Jupyter export duplicating the dkm_palettization.py tutorial deleted below]
diff --git a/docs/_examples/dkm_palettization.py b/docs/_examples/dkm_palettization.py
deleted file mode 100644
index ecb919972..000000000
--- a/docs/_examples/dkm_palettization.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# -*- coding: utf-8 -*-
utf-8 -*- -""" -.. _palettization_tutorial: - -Palettization Using Differentiable K-Means -========================================== - -""" - -######################################################################## -# In this tutorial, you learn how to palettize a -# network trained on `MNIST `_ using -# :py:class:`~.palettizer.DKMPalettizer`. -# -# Learn more about other palettization in the coremltools -# `Training-Time Palettization Documentation `_. - - -######################################################################## -# Defining the Network and Dataset -# -------------------------------- -# -# First, define your network: - -from collections import OrderedDict - -import torch -import torch.nn as nn -import torch.nn.functional as F - - -def mnist_net(num_classes=10): - return nn.Sequential(OrderedDict([ - ('conv1', nn.Conv2d(1, 32, 5, padding='same')), - ('relu1', nn.ReLU()), - ('pool1', nn.MaxPool2d(2, stride=2, padding=0)), - ('bn1', nn.BatchNorm2d(32, eps=0.001, momentum=0.01)), - ('conv2', nn.Conv2d(32, 64, 5, padding='same')), - ('relu2', nn.ReLU()), - ('pool2', nn.MaxPool2d(2, stride=2, padding=0)), - ('flatten', nn.Flatten()), - ('dense1', nn.Linear(3136, 1024)), - ('relu3', nn.ReLU()), - ('dropout', nn.Dropout(p=0.4)), - ('dense2', nn.Linear(1024, num_classes)), - ('softmax', nn.LogSoftmax())])) - - -######################################################################## -# For training, use the MNIST dataset provided by -# `PyTorch `_. -# Apply a very simple transformation to the input images to normalize them. - -import os - -from torchvision import datasets, transforms - - -def mnist_dataset(data_dir="~/.mnist_palettization_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - - -######################################################################## -# Initialize the model and the dataset. - -model = mnist_net() - -batch_size = 128 -train_dataset, test_dataset = mnist_dataset("~/.mnist_data/mnist_palettization") -train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True) -test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - -######################################################################## -# Training the Model Without Palettization -# ---------------------------------------- -# -# Train the model without applying any palettization. - -optimizer = torch.optim.SGD(model.parameters(), lr=0.008) -accuracy_unpalettized = 0.0 -num_epochs = 2 - - -def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch, palettizer = None): - optimizer.zero_grad() - if palettizer is not None: - palettizer.step() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( - epoch, batch_idx * len(data), len(train_loader.dataset), - 100. 
* batch_idx / len(train_loader), loss.item())) - - -def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction='sum').item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100. * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - -for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unpalettized = eval_model(model, test_loader) - -print("Accuracy of unpalettized network: {:.1f}%\n".format(accuracy_unpalettized)) - -######################################################################## -# Configuring Palettization -# ------------------------- -# -# Insert palettization layers into the trained model. -# For this example, apply a ``4-bit`` palettization to the ``conv2`` layer. This -# would mean that for all the weights that exist in this layer, you try to map -# each weight element to one of :math:`2^4`, -# that is, ``16`` clusters. -# -# Note that calling :py:meth:`~.palettization.DKMPalettizer.prepare` simply inserts palettization -# layers into the model. It doesn't actually palettize the weights. You do that in the next step when -# you fine-tune the model. - -from coremltools.optimize.torch.palettization import DKMPalettizer, DKMPalettizerConfig - -config = DKMPalettizerConfig.from_dict( - {"module_name_configs": {"conv2": {"n_bits": 4}}} -) -palettizer = DKMPalettizer(model, config) - -prepared_model = palettizer.prepare() - -######################################################################## -# Fine-Tuning the Palettized Model -# -------------------------------- -# -# Fine-tune the model with palettization applied. This helps the model learn the new palettized -# layers' weights in the form of a LUT and indices. - -optimizer = torch.optim.SGD(prepared_model.parameters(), lr=0.008) -accuracy_palettized = 0.0 -num_epochs = 2 - -for epoch in range(num_epochs): - prepared_model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(prepared_model, optimizer, train_loader, data, target, batch_idx, epoch, palettizer) - - # evaluate - accuracy_palettized = eval_model(prepared_model, test_loader) - -######################################################################## -# The evaluation shows that you can train a palettized network without losing much accuracy -# with the final model. - -print("Accuracy of unpalettized network: {:.1f}%\n".format(accuracy_unpalettized)) -print("Accuracy of palettized network: {:.1f}%\n".format(accuracy_palettized)) - -######################################################################## -# Restoring LUT and Indices as Weights -# ------------------------------------ -# -# Use :py:meth:`~.palettization.Palettizer.finalize` to -# restore the LUT and indices of the palettized modules as weights in the model. 
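As a quick sanity check around the ``finalize()`` call that follows, you can count the distinct values in a palettized layer's weight tensor. The helper below is an illustrative sketch rather than part of the original tutorial; it assumes the tutorial's 4-bit ``conv2`` configuration and a single per-tensor look-up table, under which the restored weights should contain at most 2**4 = 16 unique values.

```python
import torch


def count_unique_weight_values(module: torch.nn.Module) -> int:
    """Count the distinct values in a module's weight tensor."""
    with torch.no_grad():
        return int(torch.unique(module.weight.detach()).numel())


# After `finalized_model = palettizer.finalize()` below, e.g.:
#   assert count_unique_weight_values(finalized_model.conv2) <= 16
```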
- -finalized_model = palettizer.finalize() - -######################################################################## -# Exporting the Model for On-Device Execution -# ------------------------------------------- -# -# To deploy the model on device, convert it to a Core ML model. -# -# To export the model with Core ML Tools, first trace the model with an input, and then -# use the Core ML Tools converter, as described in -# `Converting from PyTorch `_. -# The parameter ``ct.PassPipeline.DEFAULT_PALETTIZATION`` signals to the -# converter a palettized model is being converted, and allows its weights to be -# represented using a look-up table (LUT) and indices, which have a much smaller -# footprint on disk as compared to the dense weights. - -import coremltools as ct - -finalized_model.eval() -example_input = torch.rand(1, 1, 28, 28) -traced_model = torch.jit.trace(finalized_model, example_input) - - -coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - pass_pipeline=ct.PassPipeline.DEFAULT_PALETTIZATION, - minimum_deployment_target=ct.target.iOS16, -) - -coreml_model.save("~/.mnist_palettization_data/palettized_model.mlpackage") diff --git a/docs/_examples/dkm_palettization.rst b/docs/_examples/dkm_palettization.rst deleted file mode 100644 index 71660043e..000000000 --- a/docs/_examples/dkm_palettization.rst +++ /dev/null @@ -1,329 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "_examples/dkm_palettization.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - :ref:`Go to the end ` - to download the full example code. - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr__examples_dkm_palettization.py: - - -.. _palettization_tutorial: - -Palettization Using Differentiable K-Means -========================================== - -.. GENERATED FROM PYTHON SOURCE LINES 11-17 - -In this tutorial, you learn how to palettize a -network trained on `MNIST `_ using -:py:class:`~.palettizer.DKMPalettizer`. - -Learn more about other palettization in the coremltools -`Training-Time Palettization Documentation `_. - -.. GENERATED FROM PYTHON SOURCE LINES 20-24 - -Defining the Network and Dataset --------------------------------- - -First, define your network: - -.. GENERATED FROM PYTHON SOURCE LINES 24-49 - -.. code-block:: Python - - - from collections import OrderedDict - - import torch - import torch.nn as nn - import torch.nn.functional as F - - - def mnist_net(num_classes=10): - return nn.Sequential(OrderedDict([ - ('conv1', nn.Conv2d(1, 32, 5, padding='same')), - ('relu1', nn.ReLU()), - ('pool1', nn.MaxPool2d(2, stride=2, padding=0)), - ('bn1', nn.BatchNorm2d(32, eps=0.001, momentum=0.01)), - ('conv2', nn.Conv2d(32, 64, 5, padding='same')), - ('relu2', nn.ReLU()), - ('pool2', nn.MaxPool2d(2, stride=2, padding=0)), - ('flatten', nn.Flatten()), - ('dense1', nn.Linear(3136, 1024)), - ('relu3', nn.ReLU()), - ('dropout', nn.Dropout(p=0.4)), - ('dense2', nn.Linear(1024, num_classes)), - ('softmax', nn.LogSoftmax())])) - - - -.. GENERATED FROM PYTHON SOURCE LINES 50-53 - -For training, use the MNIST dataset provided by -`PyTorch `_. -Apply a very simple transformation to the input images to normalize them. - -.. GENERATED FROM PYTHON SOURCE LINES 53-71 - -.. 
code-block:: Python - - - import os - - from torchvision import datasets, transforms - - - def mnist_dataset(data_dir="~/.mnist_palettization_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - - - -.. GENERATED FROM PYTHON SOURCE LINES 72-73 - -Initialize the model and the dataset. - -.. GENERATED FROM PYTHON SOURCE LINES 73-81 - -.. code-block:: Python - - - model = mnist_net() - - batch_size = 128 - train_dataset, test_dataset = mnist_dataset("~/.mnist_data/mnist_palettization") - train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True) - test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - - -.. GENERATED FROM PYTHON SOURCE LINES 82-86 - -Training the Model Without Palettization ----------------------------------------- - -Train the model without applying any palettization. - -.. GENERATED FROM PYTHON SOURCE LINES 86-139 - -.. code-block:: Python - - - optimizer = torch.optim.SGD(model.parameters(), lr=0.008) - accuracy_unpalettized = 0.0 - num_epochs = 2 - - - def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch, palettizer = None): - optimizer.zero_grad() - if palettizer is not None: - palettizer.step() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( - epoch, batch_idx * len(data), len(train_loader.dataset), - 100. * batch_idx / len(train_loader), loss.item())) - - - def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction='sum').item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100. * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - - for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unpalettized = eval_model(model, test_loader) - - print("Accuracy of unpalettized network: {:.1f}%\n".format(accuracy_unpalettized)) - - -.. GENERATED FROM PYTHON SOURCE LINES 140-152 - -Configuring Palettization -------------------------- - -Insert palettization layers into the trained model. -For this example, apply a ``4-bit`` palettization to the ``conv2`` layer. This -would mean that for all the weights that exist in this layer, you try to map -each weight element to one of :math:`2^4`, -that is, ``16`` clusters. - -Note that calling :py:meth:`~.palettization.DKMPalettizer.prepare` simply inserts palettization -layers into the model. It doesn't actually palettize the weights. You do that in the next step when -you fine-tune the model. - -.. GENERATED FROM PYTHON SOURCE LINES 152-162 - -.. 
code-block:: Python - - - from coremltools.optimize.torch.palettization import DKMPalettizer, DKMPalettizerConfig - - config = DKMPalettizerConfig.from_dict( - {"module_name_configs": {"conv2": {"n_bits": 4}}} - ) - palettizer = DKMPalettizer(model, config) - - prepared_model = palettizer.prepare() - - -.. GENERATED FROM PYTHON SOURCE LINES 163-168 - -Fine-Tuning the Palettized Model --------------------------------- - -Fine-tune the model with palettization applied. This helps the model learn the new palettized -layers' weights in the form of a LUT and indices. - -.. GENERATED FROM PYTHON SOURCE LINES 168-181 - -.. code-block:: Python - - - optimizer = torch.optim.SGD(prepared_model.parameters(), lr=0.008) - accuracy_palettized = 0.0 - num_epochs = 2 - - for epoch in range(num_epochs): - prepared_model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(prepared_model, optimizer, train_loader, data, target, batch_idx, epoch, palettizer) - - # evaluate - accuracy_palettized = eval_model(prepared_model, test_loader) - - -.. GENERATED FROM PYTHON SOURCE LINES 182-184 - -The evaluation shows that you can train a palettized network without losing much accuracy -with the final model. - -.. GENERATED FROM PYTHON SOURCE LINES 184-188 - -.. code-block:: Python - - - print("Accuracy of unpalettized network: {:.1f}%\n".format(accuracy_unpalettized)) - print("Accuracy of palettized network: {:.1f}%\n".format(accuracy_palettized)) - - -.. GENERATED FROM PYTHON SOURCE LINES 189-194 - -Restoring LUT and Indices as Weights ------------------------------------- - -Use :py:meth:`~.palettization.Palettizer.finalize` to -restore the LUT and indices of the palettized modules as weights in the model. - -.. GENERATED FROM PYTHON SOURCE LINES 194-197 - -.. code-block:: Python - - - finalized_model = palettizer.finalize() - - -.. GENERATED FROM PYTHON SOURCE LINES 198-210 - -Exporting the Model for On-Device Execution -------------------------------------------- - -To deploy the model on device, convert it to a Core ML model. - -To export the model with Core ML Tools, first trace the model with an input, and then -use the Core ML Tools converter, as described in -`Converting from PyTorch `_. -The parameter ``ct.PassPipeline.DEFAULT_PALETTIZATION`` signals to the -converter a palettized model is being converted, and allows its weights to be -represented using a look-up table (LUT) and indices, which have a much smaller -footprint on disk as compared to the dense weights. - -.. GENERATED FROM PYTHON SOURCE LINES 210-226 - -.. code-block:: Python - - - import coremltools as ct - - finalized_model.eval() - example_input = torch.rand(1, 1, 28, 28) - traced_model = torch.jit.trace(finalized_model, example_input) - - - coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - pass_pipeline=ct.PassPipeline.DEFAULT_PALETTIZATION, - minimum_deployment_target=ct.target.iOS16, - ) - - coreml_model.save("~/.mnist_palettization_data/palettized_model.mlpackage") - - -.. _sphx_glr_download__examples_dkm_palettization.py: - -.. only:: html - - .. container:: sphx-glr-footer sphx-glr-footer-example - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: dkm_palettization.ipynb ` - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: dkm_palettization.py ` - - .. 
container:: sphx-glr-download sphx-glr-download-zip - - :download:`Download zipped: dkm_palettization.zip ` - - -.. only:: html - - .. rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_examples/dkm_palettization.zip b/docs/_examples/dkm_palettization.zip deleted file mode 100644 index 1f074f966..000000000 Binary files a/docs/_examples/dkm_palettization.zip and /dev/null differ diff --git a/docs/_examples/images/thumb/sphx_glr_dkm_palettization_thumb.png b/docs/_examples/images/thumb/sphx_glr_dkm_palettization_thumb.png deleted file mode 100644 index 8a5fed589..000000000 Binary files a/docs/_examples/images/thumb/sphx_glr_dkm_palettization_thumb.png and /dev/null differ diff --git a/docs/_examples/images/thumb/sphx_glr_linear_quantization_thumb.png b/docs/_examples/images/thumb/sphx_glr_linear_quantization_thumb.png deleted file mode 100644 index 8a5fed589..000000000 Binary files a/docs/_examples/images/thumb/sphx_glr_linear_quantization_thumb.png and /dev/null differ diff --git a/docs/_examples/images/thumb/sphx_glr_magnitude_pruning_thumb.png b/docs/_examples/images/thumb/sphx_glr_magnitude_pruning_thumb.png deleted file mode 100644 index 8a5fed589..000000000 Binary files a/docs/_examples/images/thumb/sphx_glr_magnitude_pruning_thumb.png and /dev/null differ diff --git a/docs/_build/html/_examples/index.html b/docs/_examples/index.html similarity index 91% rename from docs/_build/html/_examples/index.html rename to docs/_examples/index.html index 16b18e17b..c9d2d6998 100644 --- a/docs/_build/html/_examples/index.html +++ b/docs/_examples/index.html @@ -1,12 +1,14 @@ + + - <no title> — coremltools API Reference 8.0b1 documentation + <no title> — coremltools API Reference 8.1 documentation - + @@ -15,15 +17,11 @@ - - - - - - - + + + + + @@ -40,9 +38,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_examples/index.rst b/docs/_examples/index.rst deleted file mode 100644 index bd0802f88..000000000 --- a/docs/_examples/index.rst +++ /dev/null @@ -1,59 +0,0 @@ -:orphan: - - - - -.. raw:: html - -
    - -.. thumbnail-parent-div-open - -.. raw:: html - -
    - -.. only:: html - - .. image:: /_examples/images/thumb/sphx_glr_linear_quantization_thumb.png - :alt: - - :ref:`sphx_glr__examples_linear_quantization.py` - -.. raw:: html - -
    Linear Quantization
    -
    - - -.. thumbnail-parent-div-close - -.. raw:: html - -
    - - -.. toctree:: - :hidden: - - /_examples/linear_quantization - - -.. only:: html - - .. container:: sphx-glr-footer sphx-glr-footer-gallery - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download all examples in Python source code: _examples_python.zip ` - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download all examples in Jupyter notebooks: _examples_jupyter.zip ` - - -.. only:: html - - .. rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_examples/linear_quantization.codeobj.json b/docs/_examples/linear_quantization.codeobj.json deleted file mode 100644 index 23aadbd05..000000000 --- a/docs/_examples/linear_quantization.codeobj.json +++ /dev/null @@ -1,315 +0,0 @@ -{ - ".pruning.LinearQuantizer.finalize": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning.LinearQuantizer", - "module_short": ".pruning.LinearQuantizer", - "name": "finalize" - } - ], - ".quantization.LinearQuantizer": [ - { - "is_class": false, - "is_explicit": true, - "module": ".quantization", - "module_short": ".quantization", - "name": "LinearQuantizer" - }, - { - "is_class": false, - "is_explicit": true, - "module": ".quantization", - "module_short": ".quantization", - "name": "LinearQuantizer" - } - ], - ".quantization.LinearQuantizer.prepare": [ - { - "is_class": false, - "is_explicit": true, - "module": ".quantization.LinearQuantizer", - "module_short": ".quantization.LinearQuantizer", - "name": "prepare" - } - ], - ".quantization.LinearQuantizer.step": [ - { - "is_class": false, - "is_explicit": true, - "module": ".quantization.LinearQuantizer", - "module_short": ".quantization.LinearQuantizer", - "name": "step" - } - ], - ".quantization.LinearQuantizerConfig": [ - { - "is_class": false, - "is_explicit": true, - "module": ".quantization", - "module_short": ".quantization", - "name": "LinearQuantizerConfig" - } - ], - "F.nll_loss": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn.functional", - "module_short": "torch.nn.functional", - "name": "nll_loss" - } - ], - "LinearQuantizer": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.quantization", - "module_short": "coremltools.optimize.torch.quantization", - "name": "LinearQuantizer" - } - ], - "LinearQuantizerConfig": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.quantization", - "module_short": "coremltools.optimize.torch.quantization", - "name": "LinearQuantizerConfig" - } - ], - "ModuleLinearQuantizerConfig": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.quantization", - "module_short": "coremltools.optimize.torch.quantization", - "name": "ModuleLinearQuantizerConfig" - } - ], - "OrderedDict": [ - { - "is_class": false, - "is_explicit": false, - "module": "collections", - "module_short": "collections", - "name": "OrderedDict" - } - ], - "ct.TensorType": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools", - "module_short": "coremltools", - "name": "TensorType" - } - ], - "ct.convert": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools", - "module_short": "coremltools", - "name": "convert" - } - ], - "ct.target.iOS17": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.target", - "module_short": "coremltools.target", - "name": "iOS17" - } - ], - "datasets.MNIST": [ - { - "is_class": false, - 
"is_explicit": false, - "module": "torchvision.datasets", - "module_short": "torchvision.datasets", - "name": "MNIST" - } - ], - "nn.Conv2d": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Conv2d" - } - ], - "nn.Flatten": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Flatten" - } - ], - "nn.Linear": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Linear" - } - ], - "nn.LogSoftmax": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "LogSoftmax" - } - ], - "nn.MaxPool2d": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "MaxPool2d" - } - ], - "nn.ReLU": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "ReLU" - } - ], - "nn.Sequential": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Sequential" - } - ], - "np.testing.assert_allclose": [ - { - "is_class": false, - "is_explicit": false, - "module": "numpy.testing", - "module_short": "numpy.testing", - "name": "assert_allclose" - } - ], - "os.makedirs": [ - { - "is_class": false, - "is_explicit": false, - "module": "os", - "module_short": "os", - "name": "makedirs" - } - ], - "os.path.exists": [ - { - "is_class": false, - "is_explicit": false, - "module": "os.path", - "module_short": "os.path", - "name": "exists" - } - ], - "os.path.expanduser": [ - { - "is_class": false, - "is_explicit": false, - "module": "os.path", - "module_short": "os.path", - "name": "expanduser" - } - ], - "torch.jit.trace": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.jit", - "module_short": "torch.jit", - "name": "trace" - } - ], - "torch.no_grad": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch", - "module_short": "torch", - "name": "no_grad" - } - ], - "torch.optim.Adam": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.optim", - "module_short": "torch.optim", - "name": "Adam" - } - ], - "torch.rand": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch", - "module_short": "torch", - "name": "rand" - } - ], - "torch.randn": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch", - "module_short": "torch", - "name": "randn" - } - ], - "torch.utils.data.DataLoader": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.utils.data", - "module_short": "torch.utils.data", - "name": "DataLoader" - } - ], - "transforms.Compose": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": "torchvision.transforms", - "name": "Compose" - } - ], - "transforms.Normalize": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": "torchvision.transforms", - "name": "Normalize" - } - ], - "transforms.ToTensor": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": "torchvision.transforms", - "name": "ToTensor" - } - ] -} \ No newline at end of file diff --git a/docs/_build/html/_examples/linear_quantization.html b/docs/_examples/linear_quantization.html similarity index 98% rename from docs/_build/html/_examples/linear_quantization.html rename to 
docs/_examples/linear_quantization.html index eeebcac43..24171fb33 100644 --- a/docs/_build/html/_examples/linear_quantization.html +++ b/docs/_examples/linear_quantization.html @@ -1,12 +1,14 @@ + + - Linear Quantization — coremltools API Reference 8.0b1 documentation + Linear Quantization — coremltools API Reference 8.1 documentation - + @@ -15,15 +17,11 @@ - - - - - - - + + + + + @@ -42,9 +40,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_examples/linear_quantization.py b/docs/_examples/linear_quantization.py deleted file mode 100644 index 9328a3222..000000000 --- a/docs/_examples/linear_quantization.py +++ /dev/null @@ -1,244 +0,0 @@ -# -*- coding: utf-8 -*- -""" -.. _linear_quantization_tutorial: - -Linear Quantization -=================== - -""" - -######################################################################## -# In this tutorial, you learn how to train a simple convolutional neural network on -# `MNIST `_ using :py:class:`~.quantization.LinearQuantizer`. -# -# Learn more about other quantization in the coremltools -# `Training-Time Quantization Documentation `_. -# - -######################################################################## -# Network and Dataset Definition -# ------------------------------ -# First define your network, which consists of a single convolution layer -# followed by a dense (linear) layer. - -from collections import OrderedDict - -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F - - -def mnist_net(num_classes=10): - return nn.Sequential( - OrderedDict( - [ - ("conv", nn.Conv2d(1, 12, 3, padding=1)), - ("relu", nn.ReLU()), - ("pool", nn.MaxPool2d(2, stride=2, padding=0)), - ("flatten", nn.Flatten()), - ("dense", nn.Linear(2352, num_classes)), - ("softmax", nn.LogSoftmax()), - ] - ) - ) - - -######################################################################## -# Use the `MNIST dataset provided by PyTorch `_ -# for training. Apply a very simple transformation to the input -# images to normalize them. - -import os - -from torchvision import datasets, transforms - - -def mnist_dataset(data_dir="~/.mnist_qat_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - - -######################################################################## -# Next, initialize the model and the dataset. - -model = mnist_net() - -batch_size = 128 -train_dataset, test_dataset = mnist_dataset() -train_loader = torch.utils.data.DataLoader( - train_dataset, batch_size=batch_size, shuffle=True -) -test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - -######################################################################## -# Training the Model Without Quantization -# --------------------------------------- -# Train the model without any quantization applied. 
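Before training, one aside on the network defined above: the ``2352`` input features of the dense layer come from the convolution and pooling arithmetic. The 3x3 convolution with ``padding=1`` keeps the 28x28 resolution, the stride-2 max pool halves it to 14x14, and flattening 12 channels gives 12 * 14 * 14 = 2352. A quick shape check (an illustrative sketch, assuming the ``mnist_net`` defined above):

```python
import torch

# conv -> relu -> pool -> flatten, i.e. everything before the dense layer.
feature_extractor = mnist_net()[:4]
x = torch.randn(1, 1, 28, 28)
assert feature_extractor(x).shape == (1, 2352)  # 12 channels * 14 * 14
```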
- -optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) -accuracy_unquantized = 0.0 -num_epochs = 4 - - -def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch): - optimizer.zero_grad() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print( - "Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}".format( - epoch, - batch_idx * len(data), - len(train_loader.dataset), - 100.0 * batch_idx / len(train_loader), - loss.item(), - ) - ) - - -def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction="sum").item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100.0 * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - -for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unquantized = eval_model(model, test_loader) - - -print("Accuracy of unquantized network: {:.1f}%\n".format(accuracy_unquantized)) - -######################################################################## -# Insert Quantization Layers in the Model -# --------------------------------------- -# Install :py:class:`~.quantization.LinearQuantizer` in the trained model. -# -# Create an instance of the :py:class:`~.quantization.LinearQuantizerConfig` class -# to specify quantization parameters. ``milestones=[0, 1, 2, 1]`` refers to the following: -# -# * *Index 0*: At 0th epoch, observers will start collecting statistics of values of tensors being quantized -# * *Index 1*: At 1st epoch, quantization simulation will begin -# * *Index 2*: At 2nd epoch, observers will stop collecting and quantization parameters will be frozen -# * *Index 3*: At 1st epoch, batch normalization layers will stop collecting mean and variance, and will start running in inference mode - - -from coremltools.optimize.torch.quantization import ( - LinearQuantizer, - LinearQuantizerConfig, - ModuleLinearQuantizerConfig, -) - -global_config = ModuleLinearQuantizerConfig(milestones=[0, 1, 2, 1]) -config = LinearQuantizerConfig(global_config=global_config) - -quantizer = LinearQuantizer(model, config) - -######################################################################## -# Next, call :py:meth:`~.quantization.LinearQuantizer.prepare` to insert fake quantization -# layers in the model. - -qmodel = quantizer.prepare(example_inputs=torch.randn(1, 1, 28, 28)) - -######################################################################## -# Fine-Tuning the Model -# --------------------- -# The next step is to fine tune the model with quantization applied. -# Call :py:meth:`~.quantization.LinearQuantizer.step` to step through the -# quantization milestones. 
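As an aside before fine-tuning: the quantizer configuration built above can also be expressed as a plain dictionary, mirroring the ``DKMPalettizerConfig.from_dict`` usage elsewhere in this document. A sketch, assuming ``LinearQuantizerConfig.from_dict`` accepts the same keys as the class constructors:

```python
from coremltools.optimize.torch.quantization import LinearQuantizerConfig

# Dictionary form of the configuration used above; `global_config`
# holds the per-module defaults, including the milestone schedule.
config = LinearQuantizerConfig.from_dict(
    {"global_config": {"milestones": [0, 1, 2, 1]}}
)
```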
- -optimizer = torch.optim.Adam(qmodel.parameters(), eps=1e-07) -accuracy_quantized = 0.0 -num_epochs = 4 - -for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - quantizer.step() - train_step(qmodel, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_quantized = eval_model(qmodel, test_loader) - -######################################################################## -# The evaluation shows that you can train a quantized network without a significant loss -# in model accuracy. In practice, for more complex models, -# quantization can be lossy and lead to degradation in validation accuracy. -# In such cases, you can choose to not quantize certain layers which are -# less amenable to quantization. - - -print("Accuracy of quantized network: {:.1f}%\n".format(accuracy_quantized)) -print("Accuracy of unquantized network: {:.1f}%\n".format(accuracy_unquantized)) - -np.testing.assert_allclose(accuracy_quantized, accuracy_unquantized, atol=2) - -######################################################################## -# Finalizing the Model for Export -# ------------------------------- -# -# The example shows that you can quantize the model with a few code changes to your -# existing PyTorch training code. Now you can deploy this model on a device. -# -# To finalize the model for export, call :py:meth:`~.pruning.LinearQuantizer.finalize` -# on the quantizer. This folds the quantization parameters like scale and zero point -# into the weights. - -qmodel.eval() -quantized_model = quantizer.finalize() - -######################################################################## -# Exporting the Model for On-Device Execution -# ------------------------------------------- -# -# In order to deploy the model, convert it to a Core ML model. -# -# Follow the same steps in Core ML Tools for exporting a regular PyTorch model -# (for details, see `Converting from PyTorch `_). -# The parameter ``ct.target.iOS17`` is necessary here because activation quantization -# ops are only supported on iOS versions >= 17. - -import coremltools as ct - -example_input = torch.rand(1, 1, 28, 28) -traced_model = torch.jit.trace(quantized_model, example_input) - -coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - minimum_deployment_target=ct.target.iOS17, -) - -coreml_model.save("~/.mnist_qat_data/quantized_model.mlpackage") diff --git a/docs/_examples/linear_quantization.rst b/docs/_examples/linear_quantization.rst deleted file mode 100644 index 02562e4c7..000000000 --- a/docs/_examples/linear_quantization.rst +++ /dev/null @@ -1,356 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "_examples/linear_quantization.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - :ref:`Go to the end ` - to download the full example code. - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr__examples_linear_quantization.py: - - -.. _linear_quantization_tutorial: - -Linear Quantization -=================== - -.. GENERATED FROM PYTHON SOURCE LINES 11-17 - -In this tutorial, you learn how to train a simple convolutional neural network on -`MNIST `_ using :py:class:`~.quantization.LinearQuantizer`. - -Learn more about other quantization in the coremltools -`Training-Time Quantization Documentation `_. - - -.. 
GENERATED FROM PYTHON SOURCE LINES 19-23 - -Network and Dataset Definition ------------------------------- -First define your network, which consists of a single convolution layer -followed by a dense (linear) layer. - -.. GENERATED FROM PYTHON SOURCE LINES 23-47 - -.. code-block:: Python - - - from collections import OrderedDict - - import numpy as np - import torch - import torch.nn as nn - import torch.nn.functional as F - - - def mnist_net(num_classes=10): - return nn.Sequential( - OrderedDict( - [ - ("conv", nn.Conv2d(1, 12, 3, padding=1)), - ("relu", nn.ReLU()), - ("pool", nn.MaxPool2d(2, stride=2, padding=0)), - ("flatten", nn.Flatten()), - ("dense", nn.Linear(2352, num_classes)), - ("softmax", nn.LogSoftmax()), - ] - ) - ) - - - -.. GENERATED FROM PYTHON SOURCE LINES 48-51 - -Use the `MNIST dataset provided by PyTorch `_ -for training. Apply a very simple transformation to the input -images to normalize them. - -.. GENERATED FROM PYTHON SOURCE LINES 51-69 - -.. code-block:: Python - - - import os - - from torchvision import datasets, transforms - - - def mnist_dataset(data_dir="~/.mnist_qat_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - - - -.. GENERATED FROM PYTHON SOURCE LINES 70-71 - -Next, initialize the model and the dataset. - -.. GENERATED FROM PYTHON SOURCE LINES 71-81 - -.. code-block:: Python - - - model = mnist_net() - - batch_size = 128 - train_dataset, test_dataset = mnist_dataset() - train_loader = torch.utils.data.DataLoader( - train_dataset, batch_size=batch_size, shuffle=True - ) - test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - - -.. GENERATED FROM PYTHON SOURCE LINES 82-85 - -Training the Model Without Quantization ---------------------------------------- -Train the model without any quantization applied. - -.. GENERATED FROM PYTHON SOURCE LINES 85-143 - -.. 
code-block:: Python - - - optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) - accuracy_unquantized = 0.0 - num_epochs = 4 - - - def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch): - optimizer.zero_grad() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print( - "Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}".format( - epoch, - batch_idx * len(data), - len(train_loader.dataset), - 100.0 * batch_idx / len(train_loader), - loss.item(), - ) - ) - - - def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction="sum").item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100.0 * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - - for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unquantized = eval_model(model, test_loader) - - - print("Accuracy of unquantized network: {:.1f}%\n".format(accuracy_unquantized)) - - -.. GENERATED FROM PYTHON SOURCE LINES 144-155 - -Insert Quantization Layers in the Model ---------------------------------------- -Install :py:class:`~.quantization.LinearQuantizer` in the trained model. - -Create an instance of the :py:class:`~.quantization.LinearQuantizerConfig` class -to specify quantization parameters. ``milestones=[0, 1, 2, 1]`` refers to the following: - -* *Index 0*: At 0th epoch, observers will start collecting statistics of values of tensors being quantized -* *Index 1*: At 1st epoch, quantization simulation will begin -* *Index 2*: At 2nd epoch, observers will stop collecting and quantization parameters will be frozen -* *Index 3*: At 1st epoch, batch normalization layers will stop collecting mean and variance, and will start running in inference mode - -.. GENERATED FROM PYTHON SOURCE LINES 155-168 - -.. code-block:: Python - - - - from coremltools.optimize.torch.quantization import ( - LinearQuantizer, - LinearQuantizerConfig, - ModuleLinearQuantizerConfig, - ) - - global_config = ModuleLinearQuantizerConfig(milestones=[0, 1, 2, 1]) - config = LinearQuantizerConfig(global_config=global_config) - - quantizer = LinearQuantizer(model, config) - - -.. GENERATED FROM PYTHON SOURCE LINES 169-171 - -Next, call :py:meth:`~.quantization.LinearQuantizer.prepare` to insert fake quantization -layers in the model. - -.. GENERATED FROM PYTHON SOURCE LINES 171-174 - -.. code-block:: Python - - - qmodel = quantizer.prepare(example_inputs=torch.randn(1, 1, 28, 28)) - - -.. GENERATED FROM PYTHON SOURCE LINES 175-180 - -Fine-Tuning the Model ---------------------- -The next step is to fine tune the model with quantization applied. -Call :py:meth:`~.quantization.LinearQuantizer.step` to step through the -quantization milestones. - -.. GENERATED FROM PYTHON SOURCE LINES 180-195 - -.. 
code-block:: Python - - - optimizer = torch.optim.Adam(qmodel.parameters(), eps=1e-07) - accuracy_quantized = 0.0 - num_epochs = 4 - - for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - quantizer.step() - train_step(qmodel, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_quantized = eval_model(qmodel, test_loader) - - -.. GENERATED FROM PYTHON SOURCE LINES 196-201 - -The evaluation shows that you can train a quantized network without a significant loss -in model accuracy. In practice, for more complex models, -quantization can be lossy and lead to degradation in validation accuracy. -In such cases, you can choose to not quantize certain layers which are -less amenable to quantization. - -.. GENERATED FROM PYTHON SOURCE LINES 201-208 - -.. code-block:: Python - - - - print("Accuracy of quantized network: {:.1f}%\n".format(accuracy_quantized)) - print("Accuracy of unquantized network: {:.1f}%\n".format(accuracy_unquantized)) - - np.testing.assert_allclose(accuracy_quantized, accuracy_unquantized, atol=2) - - -.. GENERATED FROM PYTHON SOURCE LINES 209-218 - -Finalizing the Model for Export -------------------------------- - -The example shows that you can quantize the model with a few code changes to your -existing PyTorch training code. Now you can deploy this model on a device. - -To finalize the model for export, call :py:meth:`~.pruning.LinearQuantizer.finalize` -on the quantizer. This folds the quantization parameters like scale and zero point -into the weights. - -.. GENERATED FROM PYTHON SOURCE LINES 218-222 - -.. code-block:: Python - - - qmodel.eval() - quantized_model = quantizer.finalize() - - -.. GENERATED FROM PYTHON SOURCE LINES 223-232 - -Exporting the Model for On-Device Execution -------------------------------------------- - -In order to deploy the model, convert it to a Core ML model. - -Follow the same steps in Core ML Tools for exporting a regular PyTorch model -(for details, see `Converting from PyTorch `_). -The parameter ``ct.target.iOS17`` is necessary here because activation quantization -ops are only supported on iOS versions >= 17. - -.. GENERATED FROM PYTHON SOURCE LINES 232-245 - -.. code-block:: Python - - - import coremltools as ct - - example_input = torch.rand(1, 1, 28, 28) - traced_model = torch.jit.trace(quantized_model, example_input) - - coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - minimum_deployment_target=ct.target.iOS17, - ) - - coreml_model.save("~/.mnist_qat_data/quantized_model.mlpackage") - - -.. _sphx_glr_download__examples_linear_quantization.py: - -.. only:: html - - .. container:: sphx-glr-footer sphx-glr-footer-example - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: linear_quantization.ipynb ` - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: linear_quantization.py ` - - .. container:: sphx-glr-download sphx-glr-download-zip - - :download:`Download zipped: linear_quantization.zip ` - - -.. only:: html - - .. 
rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_examples/linear_quantization.zip b/docs/_examples/linear_quantization.zip deleted file mode 100644 index 14c994215..000000000 Binary files a/docs/_examples/linear_quantization.zip and /dev/null differ diff --git a/docs/_examples/magnitude_pruning.codeobj.json b/docs/_examples/magnitude_pruning.codeobj.json deleted file mode 100644 index 96f663194..000000000 --- a/docs/_examples/magnitude_pruning.codeobj.json +++ /dev/null @@ -1,360 +0,0 @@ -{ - ".pruning.MagnitudePruner": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning", - "module_short": ".pruning", - "name": "MagnitudePruner" - }, - { - "is_class": false, - "is_explicit": true, - "module": ".pruning", - "module_short": ".pruning", - "name": "MagnitudePruner" - } - ], - ".pruning.MagnitudePruner.finalize": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning.MagnitudePruner", - "module_short": ".pruning.MagnitudePruner", - "name": "finalize" - } - ], - ".pruning.MagnitudePruner.prepare": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning.MagnitudePruner", - "module_short": ".pruning.MagnitudePruner", - "name": "prepare" - } - ], - ".pruning.MagnitudePruner.step": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning.MagnitudePruner", - "module_short": ".pruning.MagnitudePruner", - "name": "step" - } - ], - ".pruning.MagnitudePrunerConfig": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning", - "module_short": ".pruning", - "name": "MagnitudePrunerConfig" - } - ], - ".pruning.PolynomialDecayScheduler": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning", - "module_short": ".pruning", - "name": "PolynomialDecayScheduler" - } - ], - ".pruning.pruning_scheduler.PruningScheduler": [ - { - "is_class": false, - "is_explicit": true, - "module": ".pruning.pruning_scheduler", - "module_short": ".pruning.pruning_scheduler", - "name": "PruningScheduler" - } - ], - "F.nll_loss": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn.functional", - "module_short": "torch.nn.functional", - "name": "nll_loss" - } - ], - "MagnitudePruner": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.pruning", - "module_short": "coremltools.optimize.torch.pruning", - "name": "MagnitudePruner" - } - ], - "MagnitudePrunerConfig": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.pruning", - "module_short": "coremltools.optimize.torch.pruning", - "name": "MagnitudePrunerConfig" - } - ], - "ModuleMagnitudePrunerConfig": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.pruning", - "module_short": "coremltools.optimize.torch.pruning", - "name": "ModuleMagnitudePrunerConfig" - } - ], - "OrderedDict": [ - { - "is_class": false, - "is_explicit": false, - "module": "collections", - "module_short": "collections", - "name": "OrderedDict" - } - ], - "PolynomialDecayScheduler": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.optimize.torch.pruning", - "module_short": "coremltools.optimize.torch.pruning", - "name": "PolynomialDecayScheduler" - } - ], - "ct.PassPipeline.DEFAULT_PRUNING": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.PassPipeline", - "module_short": "coremltools.PassPipeline", - "name": "DEFAULT_PRUNING" - } - ], - "ct.TensorType": [ - { - "is_class": 
false, - "is_explicit": false, - "module": "coremltools", - "module_short": "coremltools", - "name": "TensorType" - } - ], - "ct.convert": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools", - "module_short": "coremltools", - "name": "convert" - } - ], - "ct.target.iOS16": [ - { - "is_class": false, - "is_explicit": false, - "module": "coremltools.target", - "module_short": "coremltools.target", - "name": "iOS16" - } - ], - "datasets.MNIST": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.datasets", - "module_short": "torchvision.datasets", - "name": "MNIST" - } - ], - "nn.Conv2d": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Conv2d" - } - ], - "nn.Flatten": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Flatten" - } - ], - "nn.Linear": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Linear" - } - ], - "nn.LogSoftmax": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "LogSoftmax" - } - ], - "nn.MaxPool2d": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "MaxPool2d" - } - ], - "nn.ReLU": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "ReLU" - } - ], - "nn.Sequential": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Sequential" - } - ], - "np.testing.assert_allclose": [ - { - "is_class": false, - "is_explicit": false, - "module": "numpy.testing", - "module_short": "numpy.testing", - "name": "assert_allclose" - } - ], - "os.makedirs": [ - { - "is_class": false, - "is_explicit": false, - "module": "os", - "module_short": "os", - "name": "makedirs" - } - ], - "os.path.exists": [ - { - "is_class": false, - "is_explicit": false, - "module": "os.path", - "module_short": "os.path", - "name": "exists" - } - ], - "os.path.expanduser": [ - { - "is_class": false, - "is_explicit": false, - "module": "os.path", - "module_short": "os.path", - "name": "expanduser" - } - ], - "torch.jit.trace": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.jit", - "module_short": "torch.jit", - "name": "trace" - } - ], - "torch.nn.Conv2d": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Conv2d" - } - ], - "torch.nn.Linear": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.nn", - "module_short": "torch.nn", - "name": "Linear" - } - ], - "torch.no_grad": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch", - "module_short": "torch", - "name": "no_grad" - } - ], - "torch.optim.Adam": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.optim", - "module_short": "torch.optim", - "name": "Adam" - } - ], - "torch.rand": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch", - "module_short": "torch", - "name": "rand" - } - ], - "torch.utils.data.DataLoader": [ - { - "is_class": false, - "is_explicit": false, - "module": "torch.utils.data", - "module_short": "torch.utils.data", - "name": "DataLoader" - } - ], - "transforms.Compose": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": 
"torchvision.transforms", - "name": "Compose" - } - ], - "transforms.Normalize": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": "torchvision.transforms", - "name": "Normalize" - } - ], - "transforms.ToTensor": [ - { - "is_class": false, - "is_explicit": false, - "module": "torchvision.transforms", - "module_short": "torchvision.transforms", - "name": "ToTensor" - } - ] -} \ No newline at end of file diff --git a/docs/_build/html/_examples/magnitude_pruning.html b/docs/_examples/magnitude_pruning.html similarity index 98% rename from docs/_build/html/_examples/magnitude_pruning.html rename to docs/_examples/magnitude_pruning.html index d8cc8a4c1..a2f9ca0c2 100644 --- a/docs/_build/html/_examples/magnitude_pruning.html +++ b/docs/_examples/magnitude_pruning.html @@ -1,12 +1,14 @@ + + - Magnitude Pruning — coremltools API Reference 8.0b1 documentation + Magnitude Pruning — coremltools API Reference 8.1 documentation - + @@ -15,15 +17,11 @@ - - - - - - - + + + + + @@ -42,9 +40,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_examples/magnitude_pruning.ipynb b/docs/_examples/magnitude_pruning.ipynb deleted file mode 100644 index a75d76282..000000000 --- a/docs/_examples/magnitude_pruning.ipynb +++ /dev/null @@ -1,237 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n\n# Magnitude Pruning\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this tutorial, you learn how to train a simple convolutional neural network on\n[MNIST](http://yann.lecun.com/exdb/mnist/) using :py:class:`~.pruning.MagnitudePruner`.\n\nLearn more about other pruners and schedulers in the coremltools \n[Training-Time Pruning Documentation](https://coremltools.readme.io/v7.0/docs/data-dependent-pruning).\n\n\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Network and Dataset Definition\nFirst define your network, which consists of a single convolution layer\nfollowed by a dense (linear) layer.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from collections import OrderedDict\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\ndef mnist_net(num_classes=10):\n return nn.Sequential(\n OrderedDict(\n [('conv', nn.Conv2d(1, 12, 3, padding='same')),\n ('relu', nn.ReLU()),\n ('pool', nn.MaxPool2d(2, stride=2, padding=0)),\n ('flatten', nn.Flatten()),\n ('dense', nn.Linear(2352, num_classes)),\n ('softmax', nn.LogSoftmax())]\n )\n )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Use the [MNIST dataset provided by PyTorch](https://pytorch.org/vision/stable/generated/torchvision.datasets.MNIST.html#mnist)\nfor training. 
Apply a very simple transformation to the input\nimages to normalize them.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import os\n\nfrom torchvision import datasets, transforms\n\n\ndef mnist_dataset(data_dir=\"~/.mnist_pruning_data\"):\n transform = transforms.Compose(\n [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]\n )\n data_path = os.path.expanduser(f\"{data_dir}/mnist\")\n if not os.path.exists(data_path):\n os.makedirs(data_path)\n train = datasets.MNIST(data_path, train=True, download=True, transform=transform)\n test = datasets.MNIST(data_path, train=False, transform=transform)\n return train, test" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, initialize the model and the dataset.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model = mnist_net()\n\nbatch_size = 128\ntrain_dataset, test_dataset = mnist_dataset()\ntrain_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True)\ntest_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Training the Model Without Pruning\nTrain the model without any pruning applied.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "optimizer = torch.optim.Adam(model.parameters(), eps=1e-07)\naccuracy_unpruned = 0.0\nnum_epochs = 4\n\n\ndef train_step(model, optimizer, train_loader, data, target, batch_idx, epoch):\n optimizer.zero_grad()\n output = model(data)\n loss = F.nll_loss(output, target)\n loss.backward()\n optimizer.step()\n if batch_idx % 100 == 0:\n print('Train Epoch: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n epoch, batch_idx * len(data), len(train_loader.dataset),\n 100. * batch_idx / len(train_loader), loss.item()))\n\n\ndef eval_model(model, test_loader):\n model.eval()\n test_loss = 0\n correct = 0\n with torch.no_grad():\n for data, target in test_loader:\n output = model(data)\n test_loss += F.nll_loss(output, target, reduction='sum').item()\n pred = output.argmax(dim=1, keepdim=True)\n correct += pred.eq(target.view_as(pred)).sum().item()\n\n test_loss /= len(test_loader.dataset)\n accuracy = 100. 
* correct / len(test_loader.dataset)\n\n print(\n \"\\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\\n\".format(\n test_loss, accuracy\n )\n )\n return accuracy\n\n\nfor epoch in range(num_epochs):\n # train one epoch\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n train_step(model, optimizer, train_loader, data, target, batch_idx, epoch)\n\n # evaluate\n accuracy_unpruned = eval_model(model, test_loader)\n\n\nprint(\"Accuracy of unpruned network: {:.1f}%\\n\".format(accuracy_unpruned))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Installing the Pruner in the Model\nInstall :py:class:`~.pruning.MagnitudePruner` in the trained model.\n\nFirst, construct a :py:class:`~.pruning.pruning_scheduler.PruningScheduler` class,\nwhich specifies how the sparsity of your pruned layers should evolve over the course of the training.\nFor this tutorial, use a :py:class:`~.pruning.PolynomialDecayScheduler`,\nwhich is introduced in the paper [\"To prune or not to prune\"](https://arxiv.org/pdf/1710.01878.pdf).\n\nBegin pruning from step ``0`` and prune every ``100`` steps for two epochs. As you\nstep through this pruning scheduler, the sparsity of pruned modules will increase\ngradually from the initial value to the target value.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from coremltools.optimize.torch.pruning import PolynomialDecayScheduler\n\nscheduler = PolynomialDecayScheduler(update_steps=list(range(0, 900, 100)))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, create an instance of the :py:class:`~.pruning.MagnitudePrunerConfig` class\nto specify how you want different submodules to be pruned.\nSet the target sparsity of the convolution layer\nto ``70 %`` and the dense layer to ``80 %``. The point of this is to demonstrate that\ndifferent layers can be targeted at different sparsity levels. In practice, the sparsity\nlevel of a layer is a hyperparameter, which needs to be tuned for your requirements and\nthe amenability of the layer to sparsification.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "from coremltools.optimize.torch.pruning import (\n MagnitudePruner,\n MagnitudePrunerConfig,\n ModuleMagnitudePrunerConfig,\n)\n\nconv_config = ModuleMagnitudePrunerConfig(target_sparsity=0.7)\nlinear_config = ModuleMagnitudePrunerConfig(target_sparsity=0.8)\n\nconfig = MagnitudePrunerConfig().set_module_type(torch.nn.Conv2d, conv_config)\nconfig = config.set_module_type(torch.nn.Linear, linear_config)\n\npruner = MagnitudePruner(model, config)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, call :py:meth:`~.pruning.MagnitudePruner.prepare` to insert pruning\n``forward pre hooks`` on the modules configured previously.\nThese forward pre hooks are called before a call to the forward\nmethod of the module. 
They multiply the parameter with a pruning mask, which\nis a tensor of the same shape as the parameter, in which each element has a value of\neither ``1`` or ``0``.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "pruner.prepare(inplace=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Fine-Tuning the Pruned Model\nThe next step is to fine tune the model with pruning applied. In order to prune the model,\ncall the :py:meth:`~.pruning.MagnitudePruner.step` method on the pruner\nafter every call to ``optimizer.step()`` to step through the pruning schedule.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "optimizer = torch.optim.Adam(model.parameters(), eps=1e-07)\naccuracy_pruned = 0.0\nnum_epochs = 2\n\nfor epoch in range(num_epochs):\n # train one epoch\n model.train()\n for batch_idx, (data, target) in enumerate(train_loader):\n train_step(model, optimizer, train_loader, data, target, batch_idx, epoch)\n pruner.step()\n\n # evaluate\n accuracy_pruned = eval_model(model, test_loader)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The evaluation shows that you can train a pruned network without losing\naccuracy with the final model. In practice, for more complex models,\nyou have a trade-off between the sparsity and the validation accuracy\nthat can be achieved for the model. Finding the right sweet spot on this\ntrade-off curve depends on the model and task.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "print(\"Accuracy of pruned network: {:.1f}%\\n\".format(accuracy_pruned))\nprint(\"Accuracy of unpruned network: {:.1f}%\\n\".format(accuracy_unpruned))\n\nnp.testing.assert_allclose(accuracy_pruned, accuracy_unpruned, atol=2)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Finalizing the Model for Export\n\nThe example shows that you can prune the model with a few code changes to your\nexisting PyTorch training code. Now you can deploy this model on a device.\n\nTo finalize the model for export, call :py:meth:`~.pruning.MagnitudePruner.finalize`\non the pruner. 
This removes all the forward pre-hooks you had attached on the submodules.\nIt also freezes the state of the pruner and multiplies the pruning mask with the corresponding\nweight matrix.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "model.eval()\npruner.finalize(inplace=True)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Exporting the Model for On-Device Execution\n\nIn order to deploy the model, convert it to a Core ML model.\n\nFollow the same steps in Core ML Tools for exporting a regular PyTorch model\n(for details, see [Converting from PyTorch](https://coremltools.readme.io/docs/pytorch-conversion)).\nThe parameter ``ct.PassPipeline.DEFAULT_PRUNING`` signals to the converter that\nthe model being converted is a pruned model, and allows the model weights to be represented as\nsparse matrices, which have a smaller memory footprint than dense matrices.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import coremltools as ct\n\nexample_input = torch.rand(1, 1, 28, 28)\ntraced_model = torch.jit.trace(model, example_input)\n\ncoreml_model = ct.convert(\n traced_model,\n inputs=[ct.TensorType(shape=example_input.shape)],\n pass_pipeline=ct.PassPipeline.DEFAULT_PRUNING,\n minimum_deployment_target=ct.target.iOS16,\n)\n\ncoreml_model.save(\"~/.mnist_pruning_data/pruned_model.mlpackage\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file diff --git a/docs/_examples/magnitude_pruning.py b/docs/_examples/magnitude_pruning.py deleted file mode 100644 index 2f5797e9a..000000000 --- a/docs/_examples/magnitude_pruning.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -""" -.. _magnitude_pruning_tutorial: - -Magnitude Pruning -================= - -""" - -######################################################################## -# In this tutorial, you learn how to train a simple convolutional neural network on -# `MNIST `_ using :py:class:`~.pruning.MagnitudePruner`. -# -# Learn more about other pruners and schedulers in the coremltools -# `Training-Time Pruning Documentation `_. -# - -######################################################################## -# Network and Dataset Definition -# ------------------------------ -# First define your network, which consists of a single convolution layer -# followed by a dense (linear) layer. - -from collections import OrderedDict - -import numpy as np -import torch -import torch.nn as nn -import torch.nn.functional as F - - -def mnist_net(num_classes=10): - return nn.Sequential( - OrderedDict( - [('conv', nn.Conv2d(1, 12, 3, padding='same')), - ('relu', nn.ReLU()), - ('pool', nn.MaxPool2d(2, stride=2, padding=0)), - ('flatten', nn.Flatten()), - ('dense', nn.Linear(2352, num_classes)), - ('softmax', nn.LogSoftmax())] - ) - ) - - -######################################################################## -# Use the `MNIST dataset provided by PyTorch `_ -# for training. 
Apply a very simple transformation to the input -# images to normalize them. - -import os - -from torchvision import datasets, transforms - - -def mnist_dataset(data_dir="~/.mnist_pruning_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - -######################################################################## -# Next, initialize the model and the dataset. - - -model = mnist_net() - -batch_size = 128 -train_dataset, test_dataset = mnist_dataset() -train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True) -test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - -######################################################################## -# Training the Model Without Pruning -# ---------------------------------- -# Train the model without any pruning applied. - -optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) -accuracy_unpruned = 0.0 -num_epochs = 4 - - -def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch): - optimizer.zero_grad() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( - epoch, batch_idx * len(data), len(train_loader.dataset), - 100. * batch_idx / len(train_loader), loss.item())) - - -def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction='sum').item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100. * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - -for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unpruned = eval_model(model, test_loader) - - -print("Accuracy of unpruned network: {:.1f}%\n".format(accuracy_unpruned)) - -######################################################################## -# Installing the Pruner in the Model -# ---------------------------------- -# Install :py:class:`~.pruning.MagnitudePruner` in the trained model. -# -# First, construct a :py:class:`~.pruning.pruning_scheduler.PruningScheduler` class, -# which specifies how the sparsity of your pruned layers should evolve over the course of the training. -# For this tutorial, use a :py:class:`~.pruning.PolynomialDecayScheduler`, -# which is introduced in the paper `"To prune or not to prune" `_. -# -# Begin pruning from step ``0`` and prune every ``100`` steps for two epochs. As you -# step through this pruning scheduler, the sparsity of pruned modules will increase -# gradually from the initial value to the target value. 
- -from coremltools.optimize.torch.pruning import PolynomialDecayScheduler - -scheduler = PolynomialDecayScheduler(update_steps=list(range(0, 900, 100))) - -####################################################################### -# Next, create an instance of the :py:class:`~.pruning.MagnitudePrunerConfig` class -# to specify how you want different submodules to be pruned. -# Set the target sparsity of the convolution layer -# to ``70 %`` and the dense layer to ``80 %``. The point of this is to demonstrate that -# different layers can be targeted at different sparsity levels. In practice, the sparsity -# level of a layer is a hyperparameter, which needs to be tuned for your requirements and -# the amenability of the layer to sparsification. - - -from coremltools.optimize.torch.pruning import ( - MagnitudePruner, - MagnitudePrunerConfig, - ModuleMagnitudePrunerConfig, -) - -conv_config = ModuleMagnitudePrunerConfig(target_sparsity=0.7) -linear_config = ModuleMagnitudePrunerConfig(target_sparsity=0.8) - -config = MagnitudePrunerConfig().set_module_type(torch.nn.Conv2d, conv_config) -config = config.set_module_type(torch.nn.Linear, linear_config) - -pruner = MagnitudePruner(model, config) - -######################################################################## -# Next, call :py:meth:`~.pruning.MagnitudePruner.prepare` to insert pruning -# ``forward pre hooks`` on the modules configured previously. -# These forward pre hooks are called before a call to the forward -# method of the module. They multiply the parameter with a pruning mask, which -# is a tensor of the same shape as the parameter, in which each element has a value of -# either ``1`` or ``0``. - -pruner.prepare(inplace=True) - -######################################################################## -# Fine-Tuning the Pruned Model -# ---------------------------- -# The next step is to fine tune the model with pruning applied. In order to prune the model, -# call the :py:meth:`~.pruning.MagnitudePruner.step` method on the pruner -# after every call to ``optimizer.step()`` to step through the pruning schedule. - -optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) -accuracy_pruned = 0.0 -num_epochs = 2 - -for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - pruner.step() - - # evaluate - accuracy_pruned = eval_model(model, test_loader) - -######################################################################## -# The evaluation shows that you can train a pruned network without losing -# accuracy with the final model. In practice, for more complex models, -# you have a trade-off between the sparsity and the validation accuracy -# that can be achieved for the model. Finding the right sweet spot on this -# trade-off curve depends on the model and task. - -print("Accuracy of pruned network: {:.1f}%\n".format(accuracy_pruned)) -print("Accuracy of unpruned network: {:.1f}%\n".format(accuracy_unpruned)) - -np.testing.assert_allclose(accuracy_pruned, accuracy_unpruned, atol=2) - -######################################################################## -# Finalizing the Model for Export -# ------------------------------- -# -# The example shows that you can prune the model with a few code changes to your -# existing PyTorch training code. Now you can deploy this model on a device. 
-# -# To finalize the model for export, call :py:meth:`~.pruning.MagnitudePruner.finalize` -# on the pruner. This removes all the forward pre-hooks you had attached on the submodules. -# It also freezes the state of the pruner and multiplies the pruning mask with the corresponding -# weight matrix. - -model.eval() -pruner.finalize(inplace=True) - -######################################################################## -# Exporting the Model for On-Device Execution -# ------------------------------------------- -# -# In order to deploy the model, convert it to a Core ML model. -# -# Follow the same steps in Core ML Tools for exporting a regular PyTorch model -# (for details, see `Converting from PyTorch `_). -# The parameter ``ct.PassPipeline.DEFAULT_PRUNING`` signals to the converter that -# the model being converted is a pruned model, and allows the model weights to be represented as -# sparse matrices, which have a smaller memory footprint than dense matrices. - -import coremltools as ct - -example_input = torch.rand(1, 1, 28, 28) -traced_model = torch.jit.trace(model, example_input) - -coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - pass_pipeline=ct.PassPipeline.DEFAULT_PRUNING, - minimum_deployment_target=ct.target.iOS16, -) - -coreml_model.save("~/.mnist_pruning_data/pruned_model.mlpackage") diff --git a/docs/_examples/magnitude_pruning.rst b/docs/_examples/magnitude_pruning.rst deleted file mode 100644 index e4dabe975..000000000 --- a/docs/_examples/magnitude_pruning.rst +++ /dev/null @@ -1,376 +0,0 @@ - -.. DO NOT EDIT. -.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. -.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: -.. "_examples/magnitude_pruning.py" -.. LINE NUMBERS ARE GIVEN BELOW. - -.. only:: html - - .. note:: - :class: sphx-glr-download-link-note - - :ref:`Go to the end ` - to download the full example code. - -.. rst-class:: sphx-glr-example-title - -.. _sphx_glr__examples_magnitude_pruning.py: - - -.. _magnitude_pruning_tutorial: - -Magnitude Pruning -================= - -.. GENERATED FROM PYTHON SOURCE LINES 11-17 - -In this tutorial, you learn how to train a simple convolutional neural network on -`MNIST `_ using :py:class:`~.pruning.MagnitudePruner`. - -Learn more about other pruners and schedulers in the coremltools -`Training-Time Pruning Documentation `_. - - -.. GENERATED FROM PYTHON SOURCE LINES 19-23 - -Network and Dataset Definition ------------------------------- -First define your network, which consists of a single convolution layer -followed by a dense (linear) layer. - -.. GENERATED FROM PYTHON SOURCE LINES 23-45 - -.. code-block:: Python - - - from collections import OrderedDict - - import numpy as np - import torch - import torch.nn as nn - import torch.nn.functional as F - - - def mnist_net(num_classes=10): - return nn.Sequential( - OrderedDict( - [('conv', nn.Conv2d(1, 12, 3, padding='same')), - ('relu', nn.ReLU()), - ('pool', nn.MaxPool2d(2, stride=2, padding=0)), - ('flatten', nn.Flatten()), - ('dense', nn.Linear(2352, num_classes)), - ('softmax', nn.LogSoftmax())] - ) - ) - - - -.. GENERATED FROM PYTHON SOURCE LINES 46-49 - -Use the `MNIST dataset provided by PyTorch `_ -for training. Apply a very simple transformation to the input -images to normalize them. - -.. GENERATED FROM PYTHON SOURCE LINES 49-66 - -.. 
code-block:: Python - - - import os - - from torchvision import datasets, transforms - - - def mnist_dataset(data_dir="~/.mnist_pruning_data"): - transform = transforms.Compose( - [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] - ) - data_path = os.path.expanduser(f"{data_dir}/mnist") - if not os.path.exists(data_path): - os.makedirs(data_path) - train = datasets.MNIST(data_path, train=True, download=True, transform=transform) - test = datasets.MNIST(data_path, train=False, transform=transform) - return train, test - - -.. GENERATED FROM PYTHON SOURCE LINES 67-68 - -Next, initialize the model and the dataset. - -.. GENERATED FROM PYTHON SOURCE LINES 68-77 - -.. code-block:: Python - - - - model = mnist_net() - - batch_size = 128 - train_dataset, test_dataset = mnist_dataset() - train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size, shuffle=True) - test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=batch_size) - - -.. GENERATED FROM PYTHON SOURCE LINES 78-81 - -Training the Model Without Pruning ----------------------------------- -Train the model without any pruning applied. - -.. GENERATED FROM PYTHON SOURCE LINES 81-133 - -.. code-block:: Python - - - optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) - accuracy_unpruned = 0.0 - num_epochs = 4 - - - def train_step(model, optimizer, train_loader, data, target, batch_idx, epoch): - optimizer.zero_grad() - output = model(data) - loss = F.nll_loss(output, target) - loss.backward() - optimizer.step() - if batch_idx % 100 == 0: - print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( - epoch, batch_idx * len(data), len(train_loader.dataset), - 100. * batch_idx / len(train_loader), loss.item())) - - - def eval_model(model, test_loader): - model.eval() - test_loss = 0 - correct = 0 - with torch.no_grad(): - for data, target in test_loader: - output = model(data) - test_loss += F.nll_loss(output, target, reduction='sum').item() - pred = output.argmax(dim=1, keepdim=True) - correct += pred.eq(target.view_as(pred)).sum().item() - - test_loss /= len(test_loader.dataset) - accuracy = 100. * correct / len(test_loader.dataset) - - print( - "\nTest set: Average loss: {:.4f}, Accuracy: {:.1f}%\n".format( - test_loss, accuracy - ) - ) - return accuracy - - - for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - - # evaluate - accuracy_unpruned = eval_model(model, test_loader) - - - print("Accuracy of unpruned network: {:.1f}%\n".format(accuracy_unpruned)) - - -.. GENERATED FROM PYTHON SOURCE LINES 134-146 - -Installing the Pruner in the Model ----------------------------------- -Install :py:class:`~.pruning.MagnitudePruner` in the trained model. - -First, construct a :py:class:`~.pruning.pruning_scheduler.PruningScheduler` class, -which specifies how the sparsity of your pruned layers should evolve over the course of the training. -For this tutorial, use a :py:class:`~.pruning.PolynomialDecayScheduler`, -which is introduced in the paper `"To prune or not to prune" `_. - -Begin pruning from step ``0`` and prune every ``100`` steps for two epochs. As you -step through this pruning scheduler, the sparsity of pruned modules will increase -gradually from the initial value to the target value. - -.. GENERATED FROM PYTHON SOURCE LINES 146-151 - -.. 
code-block:: Python - - - from coremltools.optimize.torch.pruning import PolynomialDecayScheduler - - scheduler = PolynomialDecayScheduler(update_steps=list(range(0, 900, 100))) - - -.. GENERATED FROM PYTHON SOURCE LINES 152-159 - -Next, create an instance of the :py:class:`~.pruning.MagnitudePrunerConfig` class -to specify how you want different submodules to be pruned. -Set the target sparsity of the convolution layer -to ``70 %`` and the dense layer to ``80 %``. The point of this is to demonstrate that -different layers can be targeted at different sparsity levels. In practice, the sparsity -level of a layer is a hyperparameter, which needs to be tuned for your requirements and -the amenability of the layer to sparsification. - -.. GENERATED FROM PYTHON SOURCE LINES 159-175 - -.. code-block:: Python - - - - from coremltools.optimize.torch.pruning import ( - MagnitudePruner, - MagnitudePrunerConfig, - ModuleMagnitudePrunerConfig, - ) - - conv_config = ModuleMagnitudePrunerConfig(target_sparsity=0.7) - linear_config = ModuleMagnitudePrunerConfig(target_sparsity=0.8) - - config = MagnitudePrunerConfig().set_module_type(torch.nn.Conv2d, conv_config) - config = config.set_module_type(torch.nn.Linear, linear_config) - - pruner = MagnitudePruner(model, config) - - -.. GENERATED FROM PYTHON SOURCE LINES 176-182 - -Next, call :py:meth:`~.pruning.MagnitudePruner.prepare` to insert pruning -``forward pre hooks`` on the modules configured previously. -These forward pre hooks are called before a call to the forward -method of the module. They multiply the parameter with a pruning mask, which -is a tensor of the same shape as the parameter, in which each element has a value of -either ``1`` or ``0``. - -.. GENERATED FROM PYTHON SOURCE LINES 182-185 - -.. code-block:: Python - - - pruner.prepare(inplace=True) - - -.. GENERATED FROM PYTHON SOURCE LINES 186-191 - -Fine-Tuning the Pruned Model ----------------------------- -The next step is to fine tune the model with pruning applied. In order to prune the model, -call the :py:meth:`~.pruning.MagnitudePruner.step` method on the pruner -after every call to ``optimizer.step()`` to step through the pruning schedule. - -.. GENERATED FROM PYTHON SOURCE LINES 191-206 - -.. code-block:: Python - - - optimizer = torch.optim.Adam(model.parameters(), eps=1e-07) - accuracy_pruned = 0.0 - num_epochs = 2 - - for epoch in range(num_epochs): - # train one epoch - model.train() - for batch_idx, (data, target) in enumerate(train_loader): - train_step(model, optimizer, train_loader, data, target, batch_idx, epoch) - pruner.step() - - # evaluate - accuracy_pruned = eval_model(model, test_loader) - - -.. GENERATED FROM PYTHON SOURCE LINES 207-212 - -The evaluation shows that you can train a pruned network without losing -accuracy with the final model. In practice, for more complex models, -you have a trade-off between the sparsity and the validation accuracy -that can be achieved for the model. Finding the right sweet spot on this -trade-off curve depends on the model and task. - -.. GENERATED FROM PYTHON SOURCE LINES 212-218 - -.. code-block:: Python - - - print("Accuracy of pruned network: {:.1f}%\n".format(accuracy_pruned)) - print("Accuracy of unpruned network: {:.1f}%\n".format(accuracy_unpruned)) - - np.testing.assert_allclose(accuracy_pruned, accuracy_unpruned, atol=2) - - -.. 
GENERATED FROM PYTHON SOURCE LINES 219-229 - -Finalizing the Model for Export -------------------------------- - -The example shows that you can prune the model with a few code changes to your -existing PyTorch training code. Now you can deploy this model on a device. - -To finalize the model for export, call :py:meth:`~.pruning.MagnitudePruner.finalize` -on the pruner. This removes all the forward pre-hooks you had attached on the submodules. -It also freezes the state of the pruner and multiplies the pruning mask with the corresponding -weight matrix. - -.. GENERATED FROM PYTHON SOURCE LINES 229-233 - -.. code-block:: Python - - - model.eval() - pruner.finalize(inplace=True) - - -.. GENERATED FROM PYTHON SOURCE LINES 234-244 - -Exporting the Model for On-Device Execution -------------------------------------------- - -In order to deploy the model, convert it to a Core ML model. - -Follow the same steps in Core ML Tools for exporting a regular PyTorch model -(for details, see `Converting from PyTorch `_). -The parameter ``ct.PassPipeline.DEFAULT_PRUNING`` signals to the converter that -the model being converted is a pruned model, and allows the model weights to be represented as -sparse matrices, which have a smaller memory footprint than dense matrices. - -.. GENERATED FROM PYTHON SOURCE LINES 244-258 - -.. code-block:: Python - - - import coremltools as ct - - example_input = torch.rand(1, 1, 28, 28) - traced_model = torch.jit.trace(model, example_input) - - coreml_model = ct.convert( - traced_model, - inputs=[ct.TensorType(shape=example_input.shape)], - pass_pipeline=ct.PassPipeline.DEFAULT_PRUNING, - minimum_deployment_target=ct.target.iOS16, - ) - - coreml_model.save("~/.mnist_pruning_data/pruned_model.mlpackage") - - -.. _sphx_glr_download__examples_magnitude_pruning.py: - -.. only:: html - - .. container:: sphx-glr-footer sphx-glr-footer-example - - .. container:: sphx-glr-download sphx-glr-download-jupyter - - :download:`Download Jupyter notebook: magnitude_pruning.ipynb ` - - .. container:: sphx-glr-download sphx-glr-download-python - - :download:`Download Python source code: magnitude_pruning.py ` - - .. container:: sphx-glr-download sphx-glr-download-zip - - :download:`Download zipped: magnitude_pruning.zip ` - - -.. only:: html - - .. rst-class:: sphx-glr-signature - - `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_examples/magnitude_pruning.zip b/docs/_examples/magnitude_pruning.zip deleted file mode 100644 index 08b422ad0..000000000 Binary files a/docs/_examples/magnitude_pruning.zip and /dev/null differ diff --git a/docs/_build/html/_examples/sg_execution_times.html b/docs/_examples/sg_execution_times.html similarity index 90% rename from docs/_build/html/_examples/sg_execution_times.html rename to docs/_examples/sg_execution_times.html index f117ba25f..106cc4621 100644 --- a/docs/_build/html/_examples/sg_execution_times.html +++ b/docs/_examples/sg_execution_times.html @@ -1,12 +1,14 @@ + + - Computation times — coremltools API Reference 8.0b1 documentation + Computation times — coremltools API Reference 8.1 documentation - + @@ -15,15 +17,11 @@ - - - - - - - + + + + + @@ -40,9 +38,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_examples/sg_execution_times.rst b/docs/_examples/sg_execution_times.rst deleted file mode 100644 index 1e646c6f5..000000000 --- a/docs/_examples/sg_execution_times.rst +++ /dev/null @@ -1,37 +0,0 @@ - -:orphan: - -.. _sphx_glr__examples_sg_execution_times: - - -Computation times -================= -**00:00.000** total execution time for 1 file **from _examples**: - -.. container:: - - .. raw:: html - - - - - - - - .. list-table:: - :header-rows: 1 - :class: table table-striped sg-datatable - - * - Example - - Time - - Mem (MB) - * - :ref:`sphx_glr__examples_magnitude_pruning.py` (``magnitude_pruning.py``) - - 00:00.000 - - 0.0 diff --git a/docs/_build/html/_images/logo.png b/docs/_images/logo.png similarity index 100% rename from docs/_build/html/_images/logo.png rename to docs/_images/logo.png diff --git a/docs/_build/html/_images/sphx_glr_linear_quantization_thumb.png b/docs/_images/sphx_glr_linear_quantization_thumb.png similarity index 100% rename from docs/_build/html/_images/sphx_glr_linear_quantization_thumb.png rename to docs/_images/sphx_glr_linear_quantization_thumb.png diff --git a/docs/_build/html/_modules/coremltools/converters/_converters_entry.html b/docs/_modules/coremltools/converters/_converters_entry.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/_converters_entry.html rename to docs/_modules/coremltools/converters/_converters_entry.html index b5ec38878..c92ce922e 100644 --- a/docs/_build/html/_modules/coremltools/converters/_converters_entry.html +++ b/docs/_modules/coremltools/converters/_converters_entry.html @@ -1,11 +1,13 @@ + + - coremltools.converters._converters_entry — coremltools API Reference 8.0b1 documentation + coremltools.converters._converters_entry — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -939,7 +934,9 @@

    Source code for coremltools.converters._converters_entry
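The next hunk widens `ct.convert` input validation to also accept `ct.StateType`. For reference, a minimal sketch of an `inputs` list that passes the widened check; the names, shapes, and dtypes here are illustrative assumptions, not taken from this diff:

```python
import numpy as np

import coremltools as ct

# All three wrapper types are now valid entries of the `inputs` list.
inputs = [
    ct.TensorType(name="x", shape=(1, 3)),
    ct.ImageType(name="image", shape=(1, 3, 224, 224)),
    # A state input wraps a tensor type (illustrative fp16 cache buffer).
    ct.StateType(
        name="cache",
        wrapped_type=ct.TensorType(shape=(1, 8), dtype=np.float16),
    ),
]
```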

    flat_inputs = _flatten_list(inputs) for flat_input in flat_inputs: if not isinstance(flat_input, InputType): - raise ValueError("inputs must be a list of type ct.TensorType or ct.ImageType") + raise ValueError( + "inputs must be a list of type ct.TensorType, ct.ImageType, or ct.StateType" + ) if flat_input.dtype == types.fp16: if not ( minimum_deployment_target is not None @@ -1020,13 +1017,6 @@

    Source code for coremltools.converters._converters_entry
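The hunk below removes the assertions that forced `inputs` and `outputs` to be `None` when converting a torch.export `ExportedProgram`. A hedged sketch of what this enables, assuming a toy module; the module, names, and deployment target are illustrative:

```python
import torch

import coremltools as ct


class Net(torch.nn.Module):
    def forward(self, x):
        return torch.relu(x)


# Export the module with torch.export, then convert with user-provided
# `inputs`/`outputs`, which previously had to be None for ExportedProgram.
exported = torch.export.export(Net().eval(), (torch.rand(1, 3),))
mlmodel = ct.convert(
    exported,
    inputs=[ct.TensorType(name="x", shape=(1, 3))],
    outputs=[ct.TensorType(name="y")],
    minimum_deployment_target=ct.target.iOS17,
)
```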

    f"Conversion for models with only ATEN or EDGE dialect is supported/tested. Provided Dialect: {model.dialect}" ) - # TODO: rdar://115845792 ([Executorch] Handle user provided inputs/outputs in the convert API) - if inputs is not None: - raise AssertionError("'inputs' argument should be None for ExportedProgram") - - if outputs is not None: - raise AssertionError("'outputs' argument should be None for ExportedProgram") - else: if is_torch_model(model): if inputs is None: diff --git a/docs/_build/html/_modules/coremltools/converters/libsvm/_libsvm_converter.html b/docs/_modules/coremltools/converters/libsvm/_libsvm_converter.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/libsvm/_libsvm_converter.html rename to docs/_modules/coremltools/converters/libsvm/_libsvm_converter.html index b0d2bf34e..5adb7f6a1 100644 --- a/docs/_build/html/_modules/coremltools/converters/libsvm/_libsvm_converter.html +++ b/docs/_modules/coremltools/converters/libsvm/_libsvm_converter.html @@ -1,11 +1,13 @@ + + - coremltools.converters.libsvm._libsvm_converter — coremltools API Reference 8.0b1 documentation + coremltools.converters.libsvm._libsvm_converter — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/debugging_utils.html b/docs/_modules/coremltools/converters/mil/debugging_utils.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/debugging_utils.html rename to docs/_modules/coremltools/converters/mil/debugging_utils.html index f45cce988..efc6135eb 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/debugging_utils.html +++ b/docs/_modules/coremltools/converters/mil/debugging_utils.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.debugging_utils — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.debugging_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -103,12 +98,12 @@

    Source code for coremltools.converters.mil.debugging_utils

    from typing import List, Optional import coremltools as ct -from coremltools.models import MLModel +from coremltools.converters.mil.frontend.milproto.load import load as milproto_to_pymil from coremltools.converters.mil.mil import Builder as mb from coremltools.converters.mil.mil.passes.helper import block_context_manager from coremltools.converters.mil.mil.passes.pass_registry import PASS_REGISTRY -from coremltools.converters.mil.frontend.milproto.load import \ - load as milproto_to_pymil +from coremltools.models import MLModel +
    [docs] diff --git a/docs/_build/html/_modules/coremltools/converters/mil/input_types.html b/docs/_modules/coremltools/converters/mil/input_types.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/input_types.html rename to docs/_modules/coremltools/converters/mil/input_types.html index 58f186970..7179d5dbf 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/input_types.html +++ b/docs/_modules/coremltools/converters/mil/input_types.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.input_types — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.input_types — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/builder.html b/docs/_modules/coremltools/converters/mil/mil/builder.html similarity index 89% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/builder.html rename to docs/_modules/coremltools/converters/mil/mil/builder.html index 5c78d19ad..9822be367 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/builder.html +++ b/docs/_modules/coremltools/converters/mil/mil/builder.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.builder — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.builder — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -120,12 +115,13 @@

    Source code for coremltools.converters.mil.mil.builder

    from .scope import ( SCOPE_STACK, VALID_OPS_TO_COPY_SCOPE_INFO, - ScopeContextManger, + ScopeContextManager, ScopeInfo, ScopeSource, ) from .var import InternalVar, Var +_BEFORE_OP_STACK: List["mil.Operation"] = [] def is_python_value(val): return ( @@ -136,6 +132,29 @@

    Source code for coremltools.converters.mil.mil.builder

or (isinstance(val, (tuple, list)) and all(is_python_value(v) for v in val)) ) +class BeforeOpContextManager: + def __init__(self, before_op: "mil.Operation"): + """ + A context manager which makes the operations created within it be constructed before the target ``before_op``. + + Parameters + ---------- + before_op: Operation + * The anchor op. New operations are created right before ``before_op``. + * If the user explicitly specifies ``before_op`` when creating an ``Operation`` under this context manager, the builder + will respect the value provided by the user. + """ + if not isinstance(before_op, mil.Operation) and before_op is not None: + raise ValueError( + f"mb.set_before_op only accepts input of type Operation. Got {type(before_op)}." + ) + self.before_op = before_op + + def __enter__(self): + _BEFORE_OP_STACK.append(self.before_op) + + def __exit__(self, type, value, traceback): + _BEFORE_OP_STACK.pop()
    [docs] @@ -187,8 +206,8 @@

    Source code for coremltools.converters.mil.mil.builder

    err_msg = f"Cannot add const {val}" if any_symbolic(val): err_msg += ( - "\nPython native vals (list, tuple), np.array that are" - + "operation inputs cannot have symbolic values. Consider feeding" + "\nPython native vals (list, tuple), np.array that are " + + "operation inputs cannot have symbolic values. Consider feeding " + "symbolic shape in through placeholder and use mb.shape() " + f"operator. Input {name}: {val}" ) @@ -269,7 +288,13 @@

    Source code for coremltools.converters.mil.mil.builder

    logger.debug( "Adding op '{}' of type {}".format(kwargs["name"], op_cls.__name__) ) + + # If before_op is explicitly passed, the builder will respect it, + # otherwise it will refer to _BEFORE_OP_STACK. before_op = kwargs.get("before_op", None) + if before_op is None and len(_BEFORE_OP_STACK) != 0: + before_op = _BEFORE_OP_STACK[-1] + # Shallow copy list inputs to ensure op inputs are immutable kwargs = {k: v if not isinstance(v, (list, tuple)) else v[:] for k, v in kwargs.items() if v is not None} kwargs.update(cls._create_vars( @@ -435,10 +460,24 @@

    Source code for coremltools.converters.mil.mil.builder

    return wrapper
+ @staticmethod + def set_before_op(before_op: "mil.Operation") -> BeforeOpContextManager: + """ + The ``mb.set_before_op`` creates a context manager, which makes the operations created within it be constructed before the target ``before_op``. + + Parameters + ---------- + before_op: Operation + * The anchor op. New operations are created right before ``before_op``. + * If the user explicitly specifies ``before_op`` when creating an ``Operation`` under this context manager, the builder + will respect the value provided by the user. + """ + return BeforeOpContextManager(before_op=before_op) + @staticmethod def scope( *scopes: List[ScopeInfo], - ) -> ScopeContextManger: + ) -> ScopeContextManager: """ The ``mb.scope`` creates a context manager, which makes the operations created within it have the corresponding scope information.

    Source code for coremltools.converters.mil.mil.builder

    * TORCHSCRIPT_MODULE_TYPE: ["Module1", "Module2"] * TORCHSCRIPT_MODULE_NAME: ["module_2"] """ - return ScopeContextManger(*scopes)
    + return ScopeContextManager(*scopes)
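A hedged usage sketch of the new `mb.set_before_op` context manager added above; the toy MIL program is an illustrative assumption, only the context-manager API itself comes from this change:

```python
import numpy as np

from coremltools.converters.mil import Builder as mb


@mb.program(input_specs=[mb.TensorSpec(shape=(2, 3))])
def prog(x):
    relu = mb.relu(x=x)
    out = mb.add(x=relu, y=np.float32(1.0))
    # Ops built in this block are inserted right before `out`; an explicit
    # `before_op=` passed to an individual op would still take precedence.
    with mb.set_before_op(out.op):
        scaled = mb.mul(x=relu, y=np.float32(2.0))  # placed before `out`
    return out
```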

    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html similarity index 93% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html index 97542f278..2509dde1c 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/coreml_dialect/ops.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.coreml_dialect.ops — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html index ac1520f81..57a4da7fa 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/activation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.activation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.activation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -158,13 +153,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same type and shape as ``x``. Attributes @@ -189,12 +184,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const T (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -238,7 +233,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) mode: const str (Optional) * Use ``'EXACT'``, ``'TANH_APPROXIMATION'``, or ``'SIGMOID_APPROXIMATION'`` for ``str``. @@ -246,7 +241,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -303,7 +298,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -328,13 +323,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -421,11 +416,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -448,11 +443,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -475,14 +470,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input range is ``(-inf, inf)``. alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -505,11 +500,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape as ``x``. Attributes @@ -532,13 +527,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -563,11 +558,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*, T> + x: tensor<\\*, T> Returns ------- - tensor<\*, T> + tensor<\\*, T> Attributes ---------- @@ -587,11 +582,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -669,13 +664,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) axis: const i32 (Optional) * Default is ``-1``. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -719,11 +714,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -746,12 +741,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const T (Required) Returns ------- - tensor<\*, T> + tensor<\\*, T> * A tensor of the same shape and type as ``x``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html index 2ed01839f..9d038218c 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/classify.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.classify — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.classify — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -122,12 +117,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.classify

    < Parameters ---------- - probabilities: tensor<[\* , ProbT]> (Required) + probabilities: tensor<[\\* , ProbT]> (Required) A tensor in the graph, which is used to compute the classifier output(s). This is the tensor whose values are mapped to the class labels and used for constructing the predicted class label and the output dictionary of class names and values. - classes: list<\*, ClassT> (Required) + classes: list<\\*, ClassT> (Required) List of classes. Returns diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html index ab9626e82..433fa0255 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/control_flow.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.control_flow — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.control_flow — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -243,7 +238,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flow Parameters ---------- - val: const<\*,T> (Required) + val: const<\\*,T> (Required) mode: immediate_value, file_value (Optional) * Determines how the constant value is stored in the internal MIL format. @@ -252,7 +247,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flow Returns ------- - const<\*,T> + const<\\*,T> Attributes ---------- @@ -322,7 +317,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flowreturn builtin_type, value @property - def weight_id(self) -> int: + def weight_id(self) -> str: """ Weight id for the const. It is used for weight sharing across multiple functions. Constants sharing the same weight_id will use the same blob file value when @@ -331,11 +326,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flowreturn self._weight_id @weight_id.setter - def weight_id(self, val: int) -> None: + def weight_id(self, val: str) -> None: """ Set weight id for the const. """ - assert isinstance(val, int), f"weight_id must be type of int. Got {type(val)}." + assert isinstance(val, str), f"weight_id must be type of str. Got {type(val)}." assert self._weight_id is None, f"cannot set {self.name} weight_id twice." self._weight_id = val

    @@ -375,20 +370,20 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flow Parameters ---------- - cond: tensor<[\*D1], B> (Required) + cond: tensor<[\\*D1], B> (Required) * Tensor. When ``True``, select element from ``x``, otherwise, ``y``. - a: tensor<[\*D2], T> (Optional) + a: tensor<[\\*D2], T> (Optional) * Values selected at indices where ``cond`` is ``True``. * Default is ``None``. - b: tensor<[\*D3], T> (Optional) + b: tensor<[\\*D3], T> (Optional) * Values selected at indices where ``cond`` is ``False``. * Default is ``None``. Returns ------- - tensor<[\*D_out], T> or tensor<[n, len(D1)], int32> + tensor<[\\*D_out], T> or tensor<[n, len(D1)], int32> * If ``a, b`` are both provided, the return shape is based on broadcast rules from ``cond, a, b``. * If ``a, b`` are ``None``, the return shape is 2-D, where the first dimension @@ -841,14 +836,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flow Parameters ---------- - ls: List[\*] (Required) + ls: List[\\*] (Required) index: <i32> (Required) * Size of the list. Returns ------- - <\*,T> + <\\*,T> * The element's value. Attributes @@ -882,14 +877,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.control_flow Parameters ---------- - ls: List[\*] (Required) + ls: List[\\*] (Required) - indices: <K,i32> (Required) + indices: <K, i32> (Required) * Gather from indices, whose element must be in ``[0, ls.length)`` at runtime. Returns ------- - <\*K,T> + <\\*K, T> * Selected tensors packed into a ``len(ls.elem_shape)+1`` rank tensor. * ``K[0] == len(indices)``. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html index 08db7e47a..7c4f56df2 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/conv.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.conv — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.conv — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -107,6 +102,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.conv

    from coremltools.converters.mil.mil.ops.defs._utils import \ spatial_dimensions_out_shape from coremltools.converters.mil.mil.ops.defs.iOS15 import _IOS15_TARGET +from coremltools.converters.mil.mil.types.symbolic import is_symbolic
    @@ -118,7 +114,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.conv

    Parameters ---------- - x: tensor<[n, C_in, \*d_in], T> (Required) + x: tensor<[n, C_in, \\*d_in], T> (Required) * ``d_in`` are (possibly runtime-determined) spatial dimensions. For example, ``d_in = [224, 224]`` for 2D convolution. @@ -126,7 +122,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.conv

    * ``C_in`` is the number of input channels or depth dimensions. * ``n`` is the batch dimension. - weight: tensor<[C_out, C_in/groups, \*K], T> (Required) + weight: tensor<[C_out, C_in/groups, \\*K], T> (Required) * Filter weights. * ``C_in`` is the number of input channels. @@ -205,7 +201,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.conv

    Returns ------- - tensor<[n, C_out, \*d_out], T> + tensor<[n, C_out, \\*d_out], T> * Output activation has the same rank and spatial dimension as the input. That is, ``len(d_out) == len(d_in)``. * For ``i=0,..,len(d_in)-1, d_out[i] = floor [(D_in[i] + pad[2*i] + @@ -255,14 +251,16 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.conv
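The hunk that follows rewrites these checks with f-strings and skips the channel checks when `C_in` is symbolic. For reference, a small sketch of the shape contract being enforced for a grouped conv; the concrete sizes are illustrative:

```python
import numpy as np

from coremltools.converters.mil import Builder as mb


# Grouped 1-D conv: C_in = 4 and groups = 2, so C_in % groups == 0 and the
# weight has shape [C_out, C_in // groups, K] with weight.shape[1] == 2.
@mb.program(input_specs=[mb.TensorSpec(shape=(1, 4, 16))])
def prog(x):
    weight = np.zeros((6, 2, 3), dtype=np.float32)
    return mb.conv(x=x, weight=weight, groups=2)
```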

    groups = self.groups.val if self.bias is not None and (len(self.bias.shape) > 1 or self.bias.shape[0] != C_out): - msg = "# of bias values {} not equal to # output channels {}" - raise ValueError(msg.format(self.bias.shape[0], C_out)) - if C_in % groups != 0: - msg = "# of input channels {} not divisible by groups {}" - raise ValueError(msg.format(C_in, groups)) - if C_in // groups != self.weight.shape[1]: - msg = "C_in / groups = {}/{} != weight[1] ({})" - raise ValueError(msg.format(C_in, groups, self.weight.shape[1])) + raise ValueError( + f"# of bias values {self.bias.shape[0]} not equal to # output channels {C_out}" + ) + if not is_symbolic(C_in): + if C_in % groups != 0: + raise ValueError(f"# of input channels {C_in} not divisible by groups {groups}") + if C_in // groups != self.weight.shape[1]: + raise ValueError( + f"C_in / groups = {C_in}/{groups} != weight[1] ({self.weight.shape[1]})" + ) strides = self.strides.val dilations = self.dilations.val diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html similarity index 92% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html index fdaa81d89..2911d6968 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_binary.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_binary — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -194,15 +189,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: <\*,T> (Required) + y: <\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - <\*,T> + <\\*, T> Attributes ---------- @@ -225,15 +220,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: <\*,T> (Required) + y: <\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - <\*, bool> + <\\*, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -260,15 +255,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*, T> + tensor<\\*, T> * A tensor of the same type and shape as the inputs. Attributes @@ -292,15 +287,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*, bool> + tensor<\\*, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -327,15 +322,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -362,15 +357,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -397,15 +392,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -431,15 +426,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -466,15 +461,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -501,15 +496,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the same shape as the inputs. Attributes @@ -536,15 +531,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -567,15 +562,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -598,15 +593,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -629,15 +624,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -661,15 +656,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, bool> + tensor<\\*?, bool> * A boolean tensor with the broadcasted shape from inputs. Attributes @@ -695,15 +690,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -726,15 +721,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes @@ -757,15 +752,15 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_bi Parameters ---------- - x: tensor<\*, T> (Required) + x: tensor<\\*, T> (Required) * Shape must be compatible with ``y`` in broadcast. - y: tensor<\*, T> (Required) + y: tensor<\\*, T> (Required) * Shape must be compatible with ``x`` in broadcast. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor with the broadcasted shape from inputs, and type is derived from inputs. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html index d548ceb49..831e9edde 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/elementwise_unary.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_unary — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -109,7 +104,6 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un from coremltools.converters.mil.mil.types import nptype_from_builtin from coremltools.converters.mil.mil.types.symbolic import is_symbolic from coremltools.converters.mil.mil.types.type_mapping import ( - builtin_to_string, string_to_builtin, string_to_nptype, ) @@ -166,11 +160,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -194,11 +188,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -222,11 +216,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -250,11 +244,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -279,11 +273,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -307,11 +301,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -337,13 +331,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -380,11 +374,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> Attributes ---------- @@ -407,11 +401,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -435,11 +429,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -463,11 +457,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -491,11 +485,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -520,11 +514,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -548,7 +542,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const T (Optional, default=1e-4) * This is a small constant that is added to the input, before taking its inverse, for stability. @@ -556,7 +550,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -596,14 +590,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const T (Optional, default=1e-45) * This is a small constant that is added to the input, before taking log. * ``y = log(x + epsilon)``. Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -646,11 +640,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], bool> (Required) + x: tensor<[\\*d], bool> (Required) Returns ------- - tensor<[\*d], bool> + tensor<[\\*d], bool> * A tensor of the same shape as ``x``. Attributes @@ -681,11 +675,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -709,7 +703,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const T (Optional, default=1e-12) * This is a small constant that is added to the input, before applying the ``rsqrt`` function, for stability. @@ -717,7 +711,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -760,11 +754,11 @@
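``inverse``, ``log``, and ``rsqrt`` above all add a small ``epsilon`` to the input before applying the transform. An illustrative numpy sketch of the ``rsqrt`` case:

```python
import numpy as np

def rsqrt_ref(x, epsilon=1e-12):
    # Documented behavior: y = 1 / sqrt(x + epsilon), so an exact zero
    # in x yields a large finite value instead of inf.
    return 1.0 / np.sqrt(x + epsilon)

print(rsqrt_ref(np.array([0.0, 4.0])))  # [1e6, ~0.5]
```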

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -788,11 +782,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -816,11 +810,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -844,11 +838,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -872,11 +866,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -900,11 +894,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -929,11 +923,11 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -958,12 +952,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) alpha: const T (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -998,13 +992,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) dtype: const str (Required) * Can be one of the following types: ``int32``, ``fp16``, ``fp32``, ``bool``. Returns ------- - tensor<[\*d], dtype> + tensor<[\\*d], dtype> * A tensor of the same shape as ``x``, with type ``dtype``. Attributes @@ -1021,10 +1015,6 @@
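A numpy analogue of the ``cast`` contract — same shape, new dtype drawn from the documented set. Note that the float-to-int rounding shown here is numpy's truncation, not a claim about the MIL kernel:

```python
import numpy as np

x = np.array([0.0, 0.5, 2.7], dtype=np.float32)
print(x.astype(np.int32))    # [0 0 2]: numpy truncates toward zero
print(x.astype(np.float16))  # same shape, fp16 values
print(x.astype(np.bool_))    # [False  True  True]: nonzero maps to True
```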

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.elementwise_un "T": (types.fp16, types.fp32, types.int32, types.bool), } - @classmethod - def supported_dtypes(cls): - return [builtin_to_string(v) for v in cls.type_domains["T"]] - def type_inference(self): if self.dtype.val not in self.supported_dtypes(): raise NotImplementedError( diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html index 65a64cd95..dec3bd1fd 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/image_resizing.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -118,7 +113,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Parameters ---------- - x: tensor<[\*D, H1, W1],T> (Required) + x: tensor<[\\*D, H1, W1],T> (Required) * Must be at least rank ``3``. scale_factor_height: const<i32> or const<fp32> (Optional, default=1) * Scale factor for the height dimension (``axis=-2``). @@ -129,7 +124,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Returns ------- - tensor<[\*D, H2, W2],T> + tensor<[\\*D, H2, W2],T> * Tensor with same type as the input. * ``H2`` = floor(``H1`` * ``scale_factor_height``). * ``W2`` = floor(``W1`` * ``scale_factor_width``). @@ -190,7 +185,7 @@
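The documented output-size rule is easy to check directly with an illustrative helper:

```python
import math

def upsampled_size(h1, w1, scale_factor_height=1, scale_factor_width=1):
    # H2 = floor(H1 * scale_factor_height), W2 = floor(W1 * scale_factor_width)
    return math.floor(h1 * scale_factor_height), math.floor(w1 * scale_factor_width)

print(upsampled_size(7, 9, scale_factor_height=2, scale_factor_width=1.5))  # (14, 13)
```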

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Parameters ---------- - x: tensor<[\*D, H1, W1], T> (Required) + x: tensor<[\\*D, H1, W1], T> (Required) * Must be at least rank ``3``. target_size_height: const<int32> (Required) * Target spatial size for the height dimension (``axis=-2``). @@ -207,7 +202,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Returns ------- - tensor<[\*D, H2, W2], T> + tensor<[\\*D, H2, W2], T> * Tensor with same type as the input. * ``H2`` = ``target_size_height``. * ``W2`` = ``target_size_width``. @@ -255,7 +250,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Parameters ---------- - x: tensor<[\*D, H1, W1], T> (Required) + x: tensor<[\\*D, H1, W1], T> (Required) * Must be at least rank ``3``. scale_factor_height: const<U> (Optional, default=1) * Scale factor for the height dimension (``axis=-2``). @@ -311,7 +306,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Returns ------- - tensor<[\*D, H2, W2], T> + tensor<[\\*D, H2, W2], T> * Tensor with same type as the input. * ``H2`` = floor(``H1`` * ``scale_factor_height``). * ``W2`` = floor(``W1`` * ``scale_factor_width``). @@ -377,7 +372,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Parameters ---------- - x: tensor<[\*D, H1, W1],T> (Required) + x: tensor<[\\*D, H1, W1],T> (Required) * Must be at least rank ``3``. target_size_height: const<int32> (Optional, default=1) * Target spatial size for the height dimension (``axis=-2``). @@ -453,7 +448,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Returns ------- - tensor<[\*D, H2, W2],T> + tensor<[\\*D, H2, W2],T> * Tensor with same type as the input. * ``H2`` = ``target_size_height``. * ``W2`` = ``target_size_width``. @@ -685,7 +680,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Parameters ---------- - x: tensor<[\*D, H1, W1],T> (Required) + x: tensor<[\\*D, H1, W1],T> (Required) * Must be at least rank ``3``. crop_height: const<2, i32> (Required) * Amount to be cropped from the top and bottom of the height dimension @@ -695,7 +690,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.image_resizing Returns ------- - tensor<[\*D, H2, W2],T> + tensor<[\\*D, H2, W2],T> * Tensor with same type as the input. * ``H2`` = ``H1 - crop_height[0] - crop_height[1]``. * ``W2`` = ``W1 - crop_width[0] - crop_width[1]``. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html index 0f7ae68f4..a4f4cd60a 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/linear.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.linear — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.linear — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -126,7 +121,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.linear

    Parameters ---------- - x: tensor<[\*D,D_in], T> (Required) + x: tensor<[\\*D, D_in], T> (Required) * ``1 <= rank <= 3``. * ``0 <= rank(*D) <= 2``. weight: const tensor<[D_out,D_in], T> (Required) @@ -135,7 +130,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.linear

    Returns ------- - tensor<[\*D,D_out], T> + tensor<[\\*D, D_out], T> * Same rank as the input ``x``. Attributes @@ -244,9 +239,9 @@
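An illustrative numpy analogue of ``linear``, assuming the usual affine definition ``y = x @ weight.T + bias`` with ``weight`` stored as ``[D_out, D_in]``:

```python
import numpy as np

D_in, D_out = 4, 3
x = np.random.rand(2, 5, D_in).astype(np.float32)        # rank 3: [*D, D_in]
weight = np.random.rand(D_out, D_in).astype(np.float32)  # [D_out, D_in]
bias = np.zeros(D_out, dtype=np.float32)

y = x @ weight.T + bias  # contracts over D_in on the last axis
print(y.shape)           # (2, 5, 3): same rank as x, last dim becomes D_out
```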

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.linear

    Parameters ---------- - x: tensor<[\*,K1], T> (Required) + x: tensor<[\\*, K1], T> (Required) * ``x`` must be 1-D or higher. - y: tensor<[\*,K2], T> (Required) + y: tensor<[\\*, K2], T> (Required) * ``y`` must be 1-D or higher. transpose_x: const bool (Optional) * Default to ``False``. @@ -259,7 +254,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.linear

    Returns ------- - tensor<\*, T> + tensor<\\*, T> * Scalar or tensor output. Attributes @@ -369,7 +364,7 @@
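An illustrative numpy analogue of the ``matmul`` transpose flags — they transpose only the last two dimensions before a batched multiply:

```python
import numpy as np

def matmul_like(x, y, transpose_x=False, transpose_y=False):
    if transpose_x:
        x = np.swapaxes(x, -1, -2)  # swap the last two dims of x
    if transpose_y:
        y = np.swapaxes(y, -1, -2)  # swap the last two dims of y
    return np.matmul(x, y)          # batched matrix multiply

a = np.ones((2, 3, 4))
b = np.ones((2, 3, 5))
print(matmul_like(a, b, transpose_x=True).shape)  # (2, 4, 5)
```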

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.linear

    Returns ------- - tensor<[\*D, C, H, W2], T> + tensor<[\\*D, C, H, W2], T> * Same ranks as the inputs. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html index bf4cb817b..343e81bc7 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/normalization.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.normalization — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.normalization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -245,14 +240,14 @@

Source code for coremltools.converters.mil.mil.ops.defs.iOS15.normalization< Parameters ---------- - x: tensor<[\*B, \*D], T> (Required) + x: tensor<[\\*B, \\*D], T> (Required) * Input tensor, ``rank(x) >= 3``. * ``*B`` refers to the leading dimensions. * ``*D`` refers to the spatial dimensions to be normalized. Must be rank 3: ``rank(*D) == 3``. * When ``rank(x) == 3``, in which ``rank(*B) == 0 and rank(*D) == 3``, the input is divided by the square root of the sum of squares of all elements. * For ranks greater than 3, in which ``rank(*B) >= 1 and rank(*D) == 3``, - the leading dimensions \*B, starting from ``0`` to ``-4`` (inclusive), + the leading dimensions \\*B, starting from ``0`` to ``-4`` (inclusive), are all treated as batch. The L2 normalization is done batch-wise. epsilon: const T (Optional) * Small constant to avoid division by ``0``. @@ -260,7 +255,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.normalization< Returns ------- - tensor<[\*B, \*D], T> + tensor<[\\*B, \\*D], T> * Same type and shape as the input tensor ``x``. Attributes @@ -321,20 +316,20 @@
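A reference sketch of the documented ``l2_norm`` behavior — normalize over the last three (spatial) dimensions, treating any leading dimensions as batch (illustrative; the ``epsilon`` default is an assumption):

```python
import numpy as np

def l2_norm_ref(x, epsilon=1e-6):
    denom = np.sqrt(np.sum(np.square(x), axis=(-3, -2, -1), keepdims=True) + epsilon)
    return x / denom

x = np.random.rand(2, 3, 4, 5).astype(np.float32)  # rank 4: one batch dim
print(l2_norm_ref(x).shape)  # (2, 3, 4, 5): same shape as the input
```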

Source code for coremltools.converters.mil.mil.ops.defs.iOS15.normalization< Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. axes: const<[K], i32> (Optional) * Dimensions to perform layer normalization. * Default is ``None`` (all dimensions). - gamma: const tensor<\*?, T>, T> (Optional) + gamma: const tensor<\\*?, T> (Optional) * If provided, the shape must be ``x.shape[axes]``. For instance, if the input ``x`` has shape ``(3,4,5,6)`` and ``axes = [2,3]``, gamma must have shape ``(5,6)``. * Default is all ones. - beta: const tensor<\*?, T>, T> (Optional) + beta: const tensor<\\*?, T> (Optional) * Same shape as gamma. * Default is all zeros. @@ -345,7 +340,7 @@
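A reference sketch of the ``layer_norm`` contract above — normalize over ``axes``, then scale and shift with ``gamma``/``beta`` of shape ``x.shape[axes]`` (illustrative; the ``epsilon`` default here is an assumption):

```python
import numpy as np

def layer_norm_ref(x, axes, gamma=None, beta=None, epsilon=1e-5):
    axes = tuple(axes)
    mean = x.mean(axis=axes, keepdims=True)
    var = x.var(axis=axes, keepdims=True)
    out = (x - mean) / np.sqrt(var + epsilon)  # normalize over `axes`
    if gamma is not None:
        out = out * gamma  # broadcasts over the leading dimensions
    if beta is not None:
        out = out + beta
    return out

x = np.random.rand(3, 4, 5, 6).astype(np.float32)
out = layer_norm_ref(x, axes=[2, 3], gamma=np.ones((5, 6)), beta=np.zeros((5, 6)))
print(out.shape)  # (3, 4, 5, 6)
```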

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.normalization< Returns ------- - tensor<\*?, T>: + tensor<\\*?, T>: * Tensor with same shape and type as the input tensor ``x``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html index cfaaf7c81..059d5fb4f 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/pool.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.pool — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.pool — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -182,7 +177,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.pool

    Parameters ---------- - x: tensor<[n,C_in,\*D_in], T> (Required) + x: tensor<[n,C_in, \\*D_in], T> (Required) * ``3 <= rank <= 5``. * ``D_in`` are spatial dimensions, ``1 <= len(D_in) <= 3``. * ``C_in`` is the number of input channels or depth dimensions. @@ -230,7 +225,7 @@
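The Returns block below gives the pooled output size per spatial dimension; an illustrative helper assuming the standard pooling rule (``floor`` when ``ceil_mode = False``, ``ceil`` otherwise):

```python
import math

def pool_out_size(d_in, k, stride, pad_front, pad_back, ceil_mode=False):
    # ceil_mode=False: D_out = floor((D_in + pads - k) / stride) + 1
    # ceil_mode=True:  floor is replaced by ceil
    num = d_in + pad_front + pad_back - k
    return (math.ceil(num / stride) if ceil_mode else math.floor(num / stride)) + 1

print(pool_out_size(8, k=3, stride=2, pad_front=0, pad_back=0))                  # 3
print(pool_out_size(8, k=3, stride=2, pad_front=0, pad_back=0, ceil_mode=True))  # 4
```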

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.pool

    Returns ------- - tensor<[n, C_out,\*D_out], T> + tensor<[n, C_out, \\*D_out], T> * Same rank as ``x``. * ``C_out`` is the number of output channels or depth dimensions. * When ``ceil_mode = False``: diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html index 81e736ebb..12c2ccfb0 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/random.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.random — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.random — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -162,7 +157,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.random

    Returns ------- - <\*, T> + <\\*, T> * A tensor of the given target output shape filled with random values. Attributes @@ -209,7 +204,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.random

Parameters ---------- - x: <\*D_in, T> + x: <\\*D_in, T> * N-dimensional tensor which represents ``logits`` (event log-probabilities) or ``probs`` (event probabilities) depending on ``mode``. The first ``N - 1`` dimensions specify distributions, and the last dimension represents a vector of probabilities. @@ -231,7 +226,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.random

    Returns ------- - <\*D_in[:-1] + [size], T> + <\\*D_in[:-1] + [size], T> * A tensor of the given target output shape filled with random values. Attributes @@ -291,7 +286,7 @@
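An illustrative sampler with the same shape contract — the last axis holds one probability (or logit) vector per distribution, and the output shape is ``D_in[:-1] + [size]``:

```python
import numpy as np

rng = np.random.default_rng(0)

def categorical_ref(x, size=1, mode="logits"):
    x = np.asarray(x, dtype=float)
    # Stable softmax when x holds logits; plain normalization for probs.
    probs = np.exp(x - x.max(axis=-1, keepdims=True)) if mode == "logits" else x
    probs = probs / probs.sum(axis=-1, keepdims=True)
    flat = probs.reshape(-1, probs.shape[-1])
    samples = np.stack([rng.choice(p.size, size=size, p=p) for p in flat])
    return samples.reshape(x.shape[:-1] + (size,))

logits = np.log(np.array([[0.1, 0.9], [0.8, 0.2]]))
print(categorical_ref(logits, size=4).shape)  # (2, 4) == D_in[:-1] + [size]
```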

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.random

    Returns ------- - <\*, T> + <\\*, T> * A tensor of the given target output shape filled with random values. Attributes @@ -365,7 +360,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.random

    Returns ------- - <\*, T> + <\\*, T> * A tensor of the given target output shape filled with random values. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html index 86b524091..f809a31df 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/recurrent.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.recurrent — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.recurrent — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html index 5c50605c8..1f819ffd7 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/reduction.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.reduction — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.reduction — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -100,9 +95,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    import numpy as np from coremltools.converters.mil.mil import Operation, precondition, types -from coremltools.converters.mil.mil.input_type import (DefaultInputs, - InputSpec, - TensorInputType) +from coremltools.converters.mil.mil.input_type import DefaultInputs, InputSpec, TensorInputType from coremltools.converters.mil.mil.operation import VALUE from coremltools.converters.mil.mil.ops.defs._op_reqs import register_op from coremltools.converters.mil.mil.types import nptype_from_builtin @@ -229,7 +222,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axis: const<i32> (Optional) @@ -241,7 +234,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*, int32> + <\\*, int32> Attributes ---------- @@ -267,7 +260,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axis: const<i32> (Optional) @@ -279,7 +272,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*, int32> + <\\*, int32> Attributes ---------- @@ -305,10 +298,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. - axes: const<K,i32> (Optional, default="None", reduce on all axes.) + axes: const<K, i32> (Optional, default="None", reduce on all axes.) * The dimensions to reduce. keep_dims: const<bool> (Optional, default=False) @@ -317,7 +310,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -347,10 +340,10 @@
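The ``reduce_*`` ops in this module all share the ``axes``/``keep_dims`` contract; a numpy illustration:

```python
import numpy as np

x = np.arange(24, dtype=np.float32).reshape(2, 3, 4)

print(np.sum(x, axis=(0, 2)).shape)                 # (3,): reduced axes removed
print(np.sum(x, axis=(0, 2), keepdims=True).shape)  # (1, 3, 1): rank preserved
print(np.sum(x).shape)                              # (): axes=None reduces everything
```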

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. - axes: const<K,i32> (Optional, default="None", reduce on all axes.) + axes: const<K, i32> (Optional, default="None", reduce on all axes.) * The dimensions to reduce. keep_dims: const<bool> (Optional, default=False) @@ -359,7 +352,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -385,10 +378,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. - axes: const<K,i32> (Optional, default="None", reduce on all axes.) + axes: const<K, i32> (Optional, default="None", reduce on all axes.) * The dimensions to reduce. keep_dims: const<bool> (Optional, default=False) @@ -397,7 +390,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -426,7 +419,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -438,7 +431,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -476,7 +469,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -488,7 +481,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -513,7 +506,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -525,7 +518,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -551,7 +544,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*,T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -563,7 +556,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*,T> * Scalar or tensor: The reduced tensor. Attributes @@ -585,7 +578,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -597,7 +590,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -620,7 +613,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -632,7 +625,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes @@ -654,7 +647,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axes: const<K,i32> (Optional, default="None", reduce on all axes.) @@ -666,7 +659,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.reduction

    Returns ------- - <\*,T> + <\\*, T> * Scalar or tensor: The reduced tensor. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html index fab51b1d9..3017d40f0 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/scatter_gather.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -145,15 +140,15 @@

Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*N, i32> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*N, i32> (Required) * Index values may be negative. More precisely, ``-D[axis] <= v < D[axis]`` for ``v`` in ``indices``. axis: const i32 (Optional. Default=``0``) * Negative axis is supported. Returns ------- - tensor<\*K, T> + tensor<\\*K, T> * Where ``K = D[:axis] + N + D[axis+1:]``. Attributes @@ -258,10 +253,10 @@
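An illustrative numpy analogue of the ``gather`` shape rule (``np.take`` accepts the same negative indices):

```python
import numpy as np

x = np.arange(12).reshape(3, 4)        # D = (3, 4)
indices = np.array([[2, -1], [0, 1]])  # N = (2, 2); -1 wraps to the last row

out = np.take(x, indices, axis=0)
print(out.shape)  # (2, 2, 4) == D[:axis] + N + D[axis+1:] with axis=0
```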

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) + data: tensor<\\*D, T> (Required) indices: tensor<[C], i32> (Required) * 1-D tensor. - updates: tensor<\*K, T> (Required) + updates: tensor<\\*K, T> (Required) * ``K = data.shape[:axis] + [len(indices)] + data.shape[axis+1:]``. axis: const i32 (Optional) * Default to ``0``. @@ -272,7 +267,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * With the same type and shape as input ``x``. Attributes @@ -284,7 +279,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather indices = [1, 0] updates = [[5, 6, 7], [8, 9, 10]] axis = 0 - mode = "update" + mode = "add" produces: [[9, 11, 13], [9, 11, 13]] @@ -345,15 +340,15 @@
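The example's input ``data`` is not shown above; ``data = [[1, 2, 3], [4, 5, 6]]`` is the input that reproduces the documented output, which a numpy sketch can verify:

```python
import numpy as np

data = np.array([[1, 2, 3], [4, 5, 6]])  # inferred input for the example
indices = np.array([1, 0])
updates = np.array([[5, 6, 7], [8, 9, 10]])

out = data.copy()
np.add.at(out, indices, updates)  # mode="add": accumulate rows along axis 0
print(out)                        # [[ 9 11 13] [ 9 11 13]]
```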

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, i32> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, i32> (Required) * ``rank(indices) == rank(x)``. axis: const i32 (Optional): * Default to ``0``. Returns ------- - tensor<\*D, T>: + tensor<\\*D, T>: * Output tensor has the same shape as ``indices``. Attributes @@ -465,10 +460,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) - indices: tensor<\*K, i32> (Required) + data: tensor<\\*D, T> (Required) + indices: tensor<\\*K, i32> (Required) * ``rank(indices) == rank(data)``. - updates: tensor<\*K, T> (Required) + updates: tensor<\\*K, T> (Required) * Must be the same shape as ``indices``. axis: const i32 (Optional) * Default to ``0``. @@ -479,7 +474,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * With the same type and shape as input ``x``. Attributes @@ -556,12 +551,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, i32> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, i32> (Required) Returns ------- - tensor<\*V, T> + tensor<\\*V, T> * ``V = K[:-1] + D[K[-1]:]``, where ``D = x.shape`` and ``K = indices.shape``. Attributes @@ -622,10 +617,10 @@
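An illustrative numpy sketch of the ``gather_nd`` shape rule — the last axis of ``indices`` indexes the leading axes of ``x``:

```python
import numpy as np

x = np.arange(24).reshape(2, 3, 4)    # D = (2, 3, 4)
indices = np.array([[0, 1], [1, 2]])  # K = (2, 2): two (row, col) coordinates

out = x[tuple(indices.T)]  # equivalent to np.stack([x[0, 1], x[1, 2]])
print(out.shape)  # (2, 4) == K[:-1] + D[K[-1]:]
```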

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) - indices: tensor<\*E, i32> (Required) + data: tensor<\\*D, T> (Required) + indices: tensor<\\*E, i32> (Required) * indices.shape[-1] <= data.rank - updates: tensor<\*F, T> (Required) + updates: tensor<\\*F, T> (Required) * Must be the shape as ``indices.shape[:-1] + data.shape[indices.shape[-1]:]``. mode: const string (Optional) * Default to ``add``. @@ -634,7 +629,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * A tensor with the same shape and type as ``data``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html index b00a3bad0..1197c45fe 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_operation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -138,7 +133,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. lower: const<i32> (Optional) * Number of lower / below sub-diagonals to keep. If negative, keep entire @@ -151,7 +146,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -201,7 +196,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. axis: const<i32> (Optional) * Defaults to ``0``. @@ -218,7 +213,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -289,7 +284,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Tensor with shape determined by the input shape. Attributes @@ -419,7 +414,7 @@

Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Tensor; values are selected at indices where the value is not equal to ``0``. Returns @@ -481,7 +476,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?,T> + tensor<\\*?, T> * A tensor that contains one-hot vectors. Attributes @@ -554,9 +549,9 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<[\*D_in], T> (Required) + x: tensor<[\\*D_in], T> (Required) - pad: tensor<[2\*N], i32> (Required) + pad: tensor<[2\\*N], i32> (Required) ``N <= D_in``. Last ``N`` dimensions of ``x`` are padded as follows: * For each dimension ``i`` of ``x`` if ``i >= D_in - N``: @@ -581,7 +576,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<[\*D_out],T> + tensor<[\\*D_out], T> * Tensor with same type as the input. Attributes @@ -756,14 +751,14 @@
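An illustrative numpy analogue of the ``pad`` layout — the flat ``pad`` tensor holds ``(before, after)`` pairs for the last ``N`` dimensions:

```python
import numpy as np

x = np.ones((2, 3), dtype=np.float32)
pad = [1, 1, 0, 2]  # dim -2: 1 before / 1 after; dim -1: 0 before / 2 after

out = np.pad(x, [(pad[0], pad[1]), (pad[2], pad[3])], mode="constant")
print(out.shape)  # (4, 5)
```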

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. reps: tensor<[rank(x)], i32> (Required) * A 1-D tensor with length ``rank(x)``, which indicates the number to replicate the input along each dimension. Returns ------- - tensor<\*?, T>: + tensor<\\*?, T>: * An n-D tensor with same type as the input. Attributes @@ -836,7 +831,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: <\*?, T> (Required) + x: <\\*?, T> (Required) * Input tensor. * axis: const<i32> (Optional) * Defaults to ``-1`` (the last dimension). @@ -847,7 +842,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?, int32> + tensor<\\*?, int32> * Tensor containing the indices of the sorted values Attributes @@ -893,7 +888,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: <\*?, T> (Required) + x: <\\*?, T> (Required) * Input tensor. k: const<i32> (Optional) * Defaults to ``1``. @@ -907,9 +902,9 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Values of top/bottom ``k`` elements. - tensor<\*?, int32> + tensor<\\*?, int32> * Indices of the top/bottom ``k`` elements along axis. Attributes @@ -1246,7 +1241,7 @@
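An illustrative reference for the two ``topk`` outputs (a full sort for clarity; a real kernel can use partial selection):

```python
import numpy as np

def topk_ref(x, k=1, axis=-1, ascending=False):
    order = np.argsort(x, axis=axis)       # indices in ascending value order
    if not ascending:
        order = np.flip(order, axis=axis)  # descending for the "top" k
    idx = np.take(order, np.arange(k), axis=axis)
    return np.take_along_axis(x, idx, axis=axis), idx

values, indices = topk_ref(np.array([3.0, 1.0, 4.0, 1.5]), k=2)
print(values, indices)  # [4. 3.] [2 0]
```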

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: <\*?,T> (Required) + x: <\\*?, T> (Required) * The tensor to split. * The tensors may be variadic, but the number of tensors must be determined at compile time (i.e. a tuple). @@ -1274,7 +1269,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Returns ------- - Tuple[tensor<\*?, T>] + Tuple[tensor<\\*?, T>] * Where the length of the tuple is the number of splits (determined from ``num_splits`` or ``split_sizes``). @@ -1416,8 +1411,9 @@
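``np.split`` illustrates both documented modes — equal parts via ``num_splits``, or explicit sizes via cut points:

```python
import numpy as np

x = np.arange(10)

print([p.size for p in np.split(x, 2)])       # num_splits=2 -> [5, 5]
print([p.size for p in np.split(x, [3, 7])])  # sizes [3, 4, 3] via cut points
```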

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati # compare all shape for t in self.values: if not is_compatible_symbolic_vector(t.shape, t_shape): - msg = "Component tensor {} has shape {}, others have {}" - raise ValueError(msg.format(t.name, t.shape, t_shape)) + raise ValueError( + f"Component tensor {t.name} has shape {t.shape}, others have {t_shape}" + ) # Validate values share the same data type dtype = self.values[0].dtype @@ -1466,12 +1462,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_operati Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html index d589a01d1..acccd2f9a 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS15/tensor_transformation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transformation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -180,7 +175,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Scalar or tensor. axes: const tensor<[K], i32> Required * ``K`` is the number of dimensions expanded. @@ -190,7 +185,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*(rank(x)+K), T> + tensor<\\*(rank(x)+K), T> * Same type as the input ``x`` with rank ``rank(x)+K``. Attributes @@ -268,7 +263,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * An n-D tensor or a scalar. * If ``x`` is fixed rank (and possibly contains symbolic dimension), @@ -292,7 +287,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Tensor with shape determined by the input shape. Attributes @@ -449,7 +444,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. axes: const<D, i32> (Optional) @@ -458,7 +453,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -510,7 +505,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. lengths: tensor<L, i32> (Required) * 1-dimensional tensor of length ``x.shape[batch_axis]`` specifying the length @@ -525,7 +520,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -599,7 +594,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> - Scalar or tensor. Attributes @@ -708,7 +703,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Scalar or tensor. Attributes @@ -1007,7 +1002,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?,T> (Required) + x: tensor<\\*?, T> (Required) * Must be at least 1-D. axes: const<K,i32> (Optional) * Axes to squeeze out. @@ -1018,7 +1013,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<\*(rank(x)-K),T> + tensor<\\*(rank(x)-K), T> * Tensor with same type as input ``x`` and rank ``rank(x)-K``. Attributes @@ -1081,14 +1076,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Must be at least 1-D. ``x`` may have a symbolic shape. perm: const<[rank(x)], i32> (Required) * Permutation order. -rank(x) <= perm[I] < rank(x) for all perm entries. Returns ------- - tensor<\*?,T> + tensor<\\*?, T> * Tensor with same rank and type as ``x``. Attributes @@ -1187,7 +1182,7 @@

Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Parameters ---------- - x: tensor<[\*d0, d_axis, *dn], T> + x: tensor<[\\*d0, d_axis, \\*dn], T> * Input tensor. axis: const<i32> @@ -1202,7 +1197,7 @@
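The Returns block below states the output length along ``axis``; reading the formula as ``(d_axis - size) // stride + 1``, a quick sketch checks the window count:

```python
import numpy as np

def num_windows(d_axis, size, stride):
    return (d_axis - size) // stride + 1

x = np.arange(8)
size, stride = 3, 2
windows = np.stack([x[i:i + size] for i in range(0, x.size - size + 1, stride)])
print(windows.shape, num_windows(x.size, size, stride))  # (3, 3) 3
```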

    Source code for coremltools.converters.mil.mil.ops.defs.iOS15.tensor_transfo Returns ------- - tensor<[\*d0, d_axis - size // stride + 1, size, \*dn], T> + tensor<[\\*d0, d_axis - size // stride + 1, size, \\*dn], T> * The output will be a tensor of rank ``N+1`` where ``N`` is the input tensor rank. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html index 1516b9aed..28493ce60 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/constexpr_ops.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS16.constexpr_ops — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html index 94c78b069..bf63e9117 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/image_resizing.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS16.image_resizing — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html index f1860d4ba..7d0f5e950 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/scatter_gather.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -123,8 +118,8 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*N, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*N, I> (Required) * Indices values may be negative. More precisely, ``-D[axis]<= v < D[axis]`` for ``v`` in ``indices``. axis: const i32 (Optional. Default=``0``) * Negative axis is supported. @@ -133,7 +128,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather Returns ------- - tensor<\*K, T> + tensor<\\*K, T> * Where ``K = D[:axis] + N[batch_dims:] + D[axis+1:]``. Attributes @@ -224,14 +219,14 @@
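As a sketch of the ``gather`` output-shape rule above, ``K = D[:axis] + N[batch_dims:] + D[axis+1:]`` (the shapes and indices below are invented; ``batch_dims`` is left at its default of 0):

```python
import numpy as np
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(4, 5))])
def prog(x):
    # D = (4, 5), N = (3,), axis = 0, batch_dims = 0
    # => output shape K = D[:0] + N + D[1:] = (3, 5)
    return mb.gather(x=x, indices=np.array([0, 2, 3], dtype=np.int32), axis=0)
```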

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, I> (Required) axis: const i32 (Optional): * Default to ``0``. Returns ------- - tensor<\*D, T>: + tensor<\\*D, T>: * Output tensor has the same shape as ``indices``. Attributes @@ -266,14 +261,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, I> (Required) batch_dims: const i32 (Optional. Default=``0``) * The number of batch dimensions. Returns ------- - tensor<\*V, T> + tensor<\\*V, T> * ``V = K[:-1] + D[batch_dims + K[-1]:]``, where ``D = x.shape`` and ``K = indices.shape``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html index 702847b50..bb9152dcc 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_operation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -121,7 +116,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operati Parameters ---------- - ref_tensor: tensor<\*?, T> (Required) + ref_tensor: tensor<\\*?, T> (Required) * Input tensor. value: const<U> (Optional) * Default is ``0.0``. @@ -129,7 +124,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Tensor with shape determined by the input tensor. Attributes @@ -181,10 +176,10 @@
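A minimal sketch of the ``fill_like`` op documented above, assuming the iOS16 opset is available (shape and fill value are invented for illustration):

```python
import coremltools as ct
from coremltools.converters.mil import Builder as mb

@mb.program(
    input_specs=[mb.TensorSpec(shape=(2, 3))],
    opset_version=ct.target.iOS16,
)
def prog(ref):
    # Produces a (2, 3) tensor filled with 1.0, matching the shape of `ref`.
    return mb.fill_like(ref_tensor=ref, value=1.0)
```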

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Values of top/bottom ``k`` elements. - tensor<\*?, int32> + tensor<\\*?, int32> * Only returned when ``return_indices = True`` * Indices of the top/bottom ``k`` elements along axis. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html index 63ce12675..992efdf75 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS16/tensor_transformation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transformation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -119,10 +114,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * The input tensor to be reshaped. - ref_tensors: Tuple[tensor<\*?, R>] (Required) + ref_tensors: Tuple[tensor<\\*?, R>] (Required) * A tuple of tensors that define the output shape. begins: Tuple[const<int32>] (Required) @@ -163,7 +158,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS16.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type as input tensor ``x``. * Output shape is computed by ``ref_tensors``, ``begins``, ``ends``, and ``end_masks``. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html index 0a8ffe882..cd6933796 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/activation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.activation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.activation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -138,13 +133,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const U (Required) beta: const U (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same type and shape as ``x``. Attributes @@ -178,12 +173,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const U (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -221,7 +216,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -254,13 +249,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const U (Required) beta: const U (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -335,14 +330,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input range is ``(-inf, inf)``. alpha: const U (Required) beta: const U (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -376,13 +371,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const U (Required) beta: const U (Required) Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * A tensor of the same shape and type as ``x``. Attributes @@ -453,12 +448,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.activation

    Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) alpha: const U (Required) Returns ------- - tensor<\*, T> + tensor<\\*, T> * A tensor of the same shape and type as ``x``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html index c1f050fbc..97fc8c0c4 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/conv.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.conv — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.conv — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html index e856da0a1..3182389e3 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/elementwise_unary.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_unary — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -123,13 +118,13 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) dtype: const str (Required) * Can be one of the following types: ``int8``, ``uint8``, ``int16``, ``uint16``, ``int32``, ``fp16``, ``fp32``, or ``bool``. Returns ------- - tensor<[\*d], dtype> + tensor<[\\*d], dtype> * A tensor of the same shape as ``x``, with type ``dtype``. Attributes @@ -169,13 +164,13 @@
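A minimal sketch of the ``cast`` op documented above (the input shape is invented; ``dtype`` must be one of the string values listed):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(2, 2))])
def prog(x):
    # Same shape as x, with element type fp16.
    return mb.cast(x=x, dtype="fp16")
```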

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) alpha: const T (Required) beta: const T (Required) Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -204,7 +199,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const U (Optional, default=1e-4) * This is a small constant that is added to the input, before taking its inverse, for stability. @@ -212,7 +207,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -244,14 +239,14 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const U (Optional, default=1e-45) * This is a small constant that is added to the input, before taking log. * ``y = log(x + epsilon)``. Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes @@ -283,7 +278,7 @@
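To illustrate the ``y = log(x + epsilon)`` semantics above, a minimal sketch (the shape and epsilon value are invented for illustration):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(3,))])
def prog(x):
    # Computes log(x + 1e-6); the epsilon guards against log(0) at x == 0.
    return mb.log(x=x, epsilon=1e-6)
```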

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Parameters ---------- - x: tensor<[\*d], T> (Required) + x: tensor<[\\*d], T> (Required) epsilon: const U (Optional, default=1e-12) * This is a small constant that is added to the input, before applying the ``rsqrt`` function, for stability. @@ -291,7 +286,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.elementwise_un Returns ------- - tensor<[\*d], T> + tensor<[\\*d], T> * A tensor of the same shape as ``x``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html index b13b7184e..f4c7a32f4 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/image_resizing.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.image_resizing — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html index c2bf2dddc..f41fff864 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/linear.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.linear — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.linear — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -117,7 +112,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.linear

    Parameters ---------- - x: tensor<[\*D, D_in], T> (Required) + x: tensor<[\\*D, D_in], T> (Required) * ``1 <= rank <= 3``. * ``0 <= rank(*D) <= 2``. weight: const tensor<[D_out, D_in], U> (Required) @@ -126,7 +121,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.linear

    Returns ------- - tensor<[\*D, D_out], T> + tensor<[\\*D, D_out], T> * Same rank as the input ``x``. Attributes @@ -159,9 +154,9 @@
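A minimal sketch of the ``linear`` op documented above; the weight and bias are invented constants of the required ``[D_out, D_in]`` and ``[D_out]`` shapes:

```python
import numpy as np
from coremltools.converters.mil import Builder as mb

D_in, D_out = 8, 4
weight = np.random.rand(D_out, D_in).astype(np.float32)  # const [D_out, D_in]
bias = np.zeros(D_out, dtype=np.float32)                 # const [D_out]

@mb.program(input_specs=[mb.TensorSpec(shape=(2, D_in))])
def prog(x):
    # x: [*D, D_in] = (2, 8)  ->  output: [*D, D_out] = (2, 4)
    return mb.linear(x=x, weight=weight, bias=bias)
```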

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.linear

    Parameters ---------- - x: tensor<[\*, K1], T> (Required) + x: tensor<[\\*, K1], T> (Required) * ``x`` must be 1-D or higher. - y: tensor<[\*, K2], U> (Required) + y: tensor<[\\*, K2], U> (Required) * ``y`` must be 1-D or higher. transpose_x: const bool (Optional) * Default to ``False``. @@ -174,7 +169,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.linear

    Returns ------- - tensor<\*, V> + tensor<\\*, V> * Scalar or tensor output. * When ``x`` and ``y`` are both const or both non-const, it should follow ios15 behavior that ``x``, ``y``, and ``output`` all have the same dtype. diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html index 9657df35e..f925e0fdd 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/normalization.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.normalization — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.normalization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
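A minimal sketch of the ``matmul`` op documented above (the constant ``y`` and the shapes are invented for illustration):

```python
import numpy as np
from coremltools.converters.mil import Builder as mb

y_const = np.random.rand(5, 3).astype(np.float32)

@mb.program(input_specs=[mb.TensorSpec(shape=(2, 3))])
def prog(x):
    # With transpose_y=True, y is treated as (3, 5), so the result is (2, 5).
    return mb.matmul(x=x, y=y_const, transpose_x=False, transpose_y=True)
```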
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html index 73b05810e..959e35639 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/quantization_ops.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.quantization_ops — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html index 404af99b9..ac94735e9 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/recurrent.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.recurrent — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.recurrent — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html index e240b8050..107479d1b 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/reduction.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.reduction — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.reduction — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -152,7 +147,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axis: const<i32> (Optional) @@ -170,7 +165,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.reduction

    Returns ------- - <\*, U> + <\\*, U> Attributes ---------- @@ -197,7 +192,7 @@
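Assuming the reduction entries above correspond to the arg-reduction ops such as ``reduce_argmax``, a minimal sketch (shape and axis are invented for illustration):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(2, 6))])
def prog(x):
    # Index of the max element along the last axis; output shape (2,).
    return mb.reduce_argmax(x=x, axis=-1, keep_dims=False)
```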

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.reduction

    Parameters ---------- - x: <\*,T> (Required) + x: <\\*, T> (Required) * Must be 1-dimensional or higher. axis: const<i32> (Optional) @@ -214,7 +209,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.reduction

    Returns ------- - <\*, U> + <\\*, U> Attributes ---------- diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html index 45f38a9c4..f2cbb8423 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/scatter_gather.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -147,10 +142,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) + data: tensor<\\*D, T> (Required) indices: tensor<[C], i32> (Required) * 1-D tensor. - updates: tensor<\*K, T> (Required) + updates: tensor<\\*K, T> (Required) * ``K = data.shape[:axis] + [len(indices)] + data.shape[axis+1:]``. axis: const i32 (Optional) * Default to ``0``. @@ -167,7 +162,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * With the same type and shape as input ``x``. Attributes @@ -220,10 +215,10 @@
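A minimal sketch of the ``scatter`` op documented above; the shapes follow the stated rule ``K = data.shape[:axis] + [len(indices)] + data.shape[axis+1:]`` (all values invented for illustration):

```python
import numpy as np
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(4, 3))])
def prog(data):
    indices = np.array([1, 3], dtype=np.int32)   # C = 2
    updates = np.ones((2, 3), dtype=np.float32)  # K = (2, 3)
    # mode="add" accumulates the updates into rows 1 and 3 of `data`.
    return mb.scatter(data=data, indices=indices, updates=updates, axis=0, mode="add")
```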

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) - indices: tensor<\*K, i32> (Required) + data: tensor<\\*D, T> (Required) + indices: tensor<\\*K, i32> (Required) * ``rank(indices) == rank(data)``. - updates: tensor<\*K, T> (Required) + updates: tensor<\\*K, T> (Required) * Must be the same shape as ``indices``. axis: const i32 (Optional) * Default to ``0``. @@ -240,7 +235,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * With the same type and shape as input ``x``. Attributes @@ -292,9 +287,9 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - data: tensor<\*D, T> (Required) - indices: tensor<\*K, i32> (Required) - updates: tensor<\*K, T> (Required) + data: tensor<\\*D, T> (Required) + indices: tensor<\\*K, i32> (Required) + updates: tensor<\\*K, T> (Required) * Must be the shape as ``K[:-1]+data.shape[K[-1]:]``. mode: const string (Optional) * Default to ``add``. @@ -308,7 +303,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*D, T> + tensor<\\*D, T> * A tensor with the same shape and type as ``data``. Attributes @@ -372,8 +367,8 @@
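A minimal sketch of ``scatter_nd`` as documented above; the update shape follows ``K[:-1] + data.shape[K[-1]:]`` (all values invented for illustration):

```python
import numpy as np
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(4, 3))])
def prog(data):
    indices = np.array([[0], [2]], dtype=np.int32)  # K = (2, 1)
    # updates shape must be K[:-1] + data.shape[K[-1]:] = (2,) + (3,) = (2, 3)
    updates = np.ones((2, 3), dtype=np.float32)
    return mb.scatter_nd(data=data, indices=indices, updates=updates, mode="add")
```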

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*N, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*N, I> (Required) * Indices values may be negative. More precisely, ``-D[axis]<= v < D[axis]`` for ``v`` in ``indices``. axis: const i32 (Optional. Default=``0``) * Negative axis is supported. @@ -389,7 +384,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*K, T> + tensor<\\*K, T> * Where ``K = D[:axis] + N[batch_dims:] + D[axis+1:]``. Attributes @@ -450,8 +445,8 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, I> (Required) * ``rank(indices) == rank(x)``. axis: const i32 (Optional): * Default to ``0``. @@ -464,7 +459,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*D, T>: + tensor<\\*D, T>: * Output tensor has the same shape as ``indices``. Attributes @@ -526,8 +521,8 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Parameters ---------- - x: tensor<\*D, T> (Required) - indices: tensor<\*K, I> (Required) + x: tensor<\\*D, T> (Required) + indices: tensor<\\*K, I> (Required) batch_dims: const i32 (Optional. Default=``0``) * The number of batch dimensions. validate_indices: const bool (Optional) @@ -539,7 +534,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.scatter_gather Returns ------- - tensor<\*V, T> + tensor<\\*V, T> * ``V = K[:-1] + D[batch_dims + K[-1]:]``, where ``D = x.shape`` and ``K = indices.shape``. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html index fb423c97e..30b92e275 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_operation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -197,7 +192,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operati Parameters ---------- - x: <\*?, T> (Required) + x: <\\*?, T> (Required) * Input tensor. k: const<K> (Optional) * Defaults to ``1``. @@ -222,10 +217,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_operati Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Values of top/bottom ``k`` elements. - tensor<\*?, U> + tensor<\\*?, U> * Only returned when ``return_indices = True`` * Indices of the top/bottom ``k`` elements along axis. * U is int32 or uint16 determined by ``output_indices_dtype`` (int32 by default). diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html index f11aee1fb..4e0ce48e6 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS17/tensor_transformation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transformation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
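To illustrate the two ``topk`` outputs documented above, a minimal sketch (shape and ``k`` are invented; ``output_indices_dtype`` is left at its int32 default):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(2, 6))])
def prog(x):
    # topk yields two results: the top-2 values along the last axis
    # and, because return_indices=True, their indices.
    values, indices = mb.topk(x=x, k=2, axis=-1, return_indices=True)
    return values, indices
```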
    @@ -163,7 +158,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * An ``n-D`` tensor or a scalar. * If ``x`` has a fixed rank (and possibly contains symbolic dimension), @@ -187,7 +182,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Tensor with shape determined by the input shape. Attributes @@ -249,10 +244,10 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * The input tensor to be reshaped. - ref_tensors: Tuple[tensor<\*?, R>] (Required) + ref_tensors: Tuple[tensor<\\*?, R>] (Required) * A tuple of tensors that define the output shape. begins: Tuple[const<int32>] (Required) @@ -267,7 +262,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type as input tensor ``x``. * Output shape is computed by ``ref_tensors``, ``begins``, ``ends``, and ``end_masks``. @@ -312,7 +307,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Scalar or tensor. axes: const tensor<[K], int32> Required * ``K`` is the number of dimensions expanded. @@ -322,7 +317,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*(rank(x)+K), T> + tensor<\\*(rank(x)+K), T> * Same type as the input ``x`` with rank ``rank(x)+K``. Attributes @@ -362,7 +357,7 @@
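A minimal sketch of ``expand_dims`` as documented above (shape and axes invented for illustration):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(3, 4))])
def prog(x):
    # K = 2 new singleton axes; output rank is rank(x) + K = 4, shape (1, 3, 4, 1).
    return mb.expand_dims(x=x, axes=[0, -1])
```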

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?,T> (Required) + x: tensor<\\*?, T> (Required) * Must be at least 1-D. axes: const<K,int32> (Optional) * Axes to squeeze out. @@ -370,7 +365,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*(rank(x)-K),T> + tensor<\\*(rank(x)-K), T> * Tensor with same type as input ``x`` and rank ``rank(x)-K``. Attributes @@ -410,7 +405,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. axes: const<D, int32> (Optional) @@ -419,7 +414,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -464,7 +459,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Input tensor. lengths: tensor<L, U> (Required) * 1-dimensional tensor of length ``x.shape[batch_axis]`` specifying the length @@ -479,7 +474,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Same type and shape as the input tensor. Attributes @@ -524,7 +519,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<[\*d0, d_axis, *dn], T> + x: tensor<[\\*d0, d_axis, *dn], T> * Input tensor. axis: const<int32> @@ -539,7 +534,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<[\*d0, d_axis - size // stride + 1, size, \*dn], T> + tensor<[\\*d0, d_axis - size // stride + 1, size, \\*dn], T> * The output will be a tensor of rank ``N+1`` where ``N`` is the input tensor rank. @@ -582,14 +577,14 @@
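A minimal sketch of ``sliding_windows`` as documented above (shape, size, and stride invented for illustration):

```python
from coremltools.converters.mil import Builder as mb

@mb.program(input_specs=[mb.TensorSpec(shape=(2, 8, 3))])
def prog(x):
    # Along axis 1: (8 - 4) // 2 + 1 = 3 windows of size 4,
    # so the output has rank N+1 and shape (2, 3, 4, 3).
    return mb.sliding_windows(x=x, axis=1, size=4, stride=2)
```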

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Parameters ---------- - x: tensor<\*?, T> (Required) + x: tensor<\\*?, T> (Required) * Must be at least 1-D. ``x`` may have a symbolic shape. perm: const<[rank(x)], i32> (Required) * Permutation order. -rank(x) <= perm[I] < rank(x) for all perm entries. Returns ------- - tensor<\*?,T> + tensor<\\*?,T> * Tensor with same rank and type as ``x``. Attributes @@ -654,7 +649,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> - Scalar or tensor. Attributes @@ -713,7 +708,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS17.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> * Scalar or tensor. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html index 9a64ea988..af23c9618 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/compression.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS18.compression — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS18.compression — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -98,6 +93,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS18.compression# Use of this source code is governed by a BSD-3-clause license that can be # found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause +import math from typing import List, Optional import numpy as np @@ -422,11 +418,17 @@

Source code for coremltools.converters.mil.mil.ops.defs.iOS18.compressionf"{indices_dim} while 'lut' has {lut_dim}." ) - nbits = indices_dtype.get_bitwidth() - if lut_shape[-2] != 2**nbits: + num_palettes = lut_shape[-2] + nbits = int(math.log2(num_palettes)) + if num_palettes != 2**nbits: + raise ValueError( + f"Invalid parameter 'lut'; the second last dim should have size 2^nbits, but got {lut_shape[-2]}." + ) + if nbits != indices_dtype.get_bitwidth(): raise ValueError( - "Invalid parameter 'lut'; the second last dim should have size " - f"2^nbits, where nbits is {nbits}, but got {lut_shape[-2]}." + f"Invalid parameter 'indices'; the second last dim indicates the number of palettes ({num_palettes}), " + f"which means nbits is {nbits}, so the dtype of indices should be uint{nbits}, but got " + f"{types.builtin_to_string(indices_dtype)}." ) if vector_axis is not None: diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html similarity index 91% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html index c1ee0c103..f2f96069a 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/recurrent.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS18.recurrent — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS18.recurrent — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
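The rewritten validation above derives ``nbits`` from the palette dimension of ``lut`` rather than from the indices dtype, then cross-checks the two. A standalone sketch of that check in plain Python (function and argument names are invented for illustration):

```python
import math

def check_lut_and_indices(lut_shape, indices_bitwidth):
    # nbits is implied by the size of lut's second-last dim (number of palettes).
    num_palettes = lut_shape[-2]
    nbits = int(math.log2(num_palettes))
    if num_palettes != 2**nbits:
        raise ValueError("lut's second-last dim must be a power of two")
    if nbits != indices_bitwidth:
        raise ValueError(f"indices dtype should be uint{nbits}")
    return nbits

print(check_lut_and_indices((16, 4, 1), indices_bitwidth=2))  # prints 2
```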
    @@ -113,9 +108,12 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS18.recurrent

    """ Gated Recurrent Unit (GRU) - The only difference between this version and the iOS 17 :py:class:`~.iOS17.recurrent.gru` is - the reset_after parameter. This parameter is optional and defaults to False. When True, the - reset gate is applied before the elementwise matrix multiplication. + Two new parameters have been added to the iOS 17 :py:class:`~.iOS17.recurrent.gru`. + + `reset_after` - this parameter is optional and defaults to False. When True, the reset + gate is applied before the elementwise matrix multiplication. + + `input_bias` - const<3*H, T> (Optional) [Default all 0s]. """ input_spec = InputSpec( x=TensorInputType(type_domain="T"), diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html similarity index 92% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html index 3bd670d7b..06a69ec26 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/states.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS18.states — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS18.states — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html index cabb4cfc0..d35be15ab 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/tensor_transformation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transformation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -133,7 +128,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transfo ---------- x: tensor<*?, T> (Required) * Input tensor. - update: tensor<\*K, T> (Required) + update: tensor<\\*K, T> (Required) * Value tensor to be inserted. * The shape of the update tensor must match the slicing result of the input data. * rank-0 update is not supported. @@ -156,7 +151,7 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS18.tensor_transfo Returns ------- - tensor<\*?, T> + tensor<\\*?, T> - Scalar or tensor. Attributes diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html rename to docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html index 93c4a3e4d..ef2c67e2b 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html +++ b/docs/_modules/coremltools/converters/mil/mil/ops/defs/iOS18/transformers.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.ops.defs.iOS18.transformers — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.ops.defs.iOS18.transformers — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    @@ -134,18 +129,18 @@

    Source code for coremltools.converters.mil.mil.ops.defs.iOS18.transformers Mask can either be bool or float matching query, key, or value. For bool, it indicates whether the element should take part in the attention. Floats are added to the attention score. - Mask shape must be broadcastable to ``[B, \*?, L, S]``. + Mask shape must be broadcastable to ``[B, \\*?, L, S]``. Parameters ---------- - query: tensor<[B, \*?, L, E], T> (Required) - key: tensor<[B, \*?, S, E], T> (Required) - value: tensor<[B, \*?, S, EV], T> (Required) - attn_mask: tensor<[\*?, S], M> (Optional) + query: tensor<[B, \\*?, L, E], T> (Required) + key: tensor<[B, \\*?, S, E], T> (Required) + value: tensor<[B, \\*?, S, EV], T> (Required) + attn_mask: tensor<[\\*?, S], M> (Optional) Returns ------- - tensor<[B, \*?, L, EV], T> + tensor<[B, \\*?, L, EV], T> Attributes ---------- diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html index bda100fb0..1ba2e81c0 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_deduplication.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.const_deduplication — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.const_deduplication — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
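To make the ``scaled_dot_product_attention`` shapes above concrete, here is a plain numpy sketch of the computation with an additive float mask; the sizes are invented for illustration:

```python
import numpy as np

B, L, S, E = 1, 2, 3, 4               # illustrative sizes; EV = E here
q = np.random.rand(B, L, E).astype(np.float32)
k = np.random.rand(B, S, E).astype(np.float32)
v = np.random.rand(B, S, E).astype(np.float32)
mask = np.zeros((L, S), np.float32)   # a float mask is added to the scores

scores = q @ k.transpose(0, 2, 1) / np.sqrt(E) + mask           # [B, L, S]
scores -= scores.max(axis=-1, keepdims=True)                    # stable softmax
weights = np.exp(scores) / np.exp(scores).sum(-1, keepdims=True)
out = weights @ v                                               # [B, L, EV]
print(out.shape)  # (1, 2, 4)
```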
    @@ -200,7 +195,7 @@

    Source code for coremltools.converters.mil.mil.passes.defs.cleanup.const_ded all_vars = [k] + list(v) all_vars = list(set(all_vars)) for duplicate in all_vars: - duplicate.op.weight_id = i + duplicate.op.weight_id = str(i) def remove_duplicate_ops( self, block: Block, unique2duplicates: Dict[Var, List[Var]], force_replace: bool @@ -247,7 +242,7 @@

    Source code for coremltools.converters.mil.mil.passes.defs.cleanup.const_ded hashkey_2_duplicates: Dict[Tuple, List[Var]] = {} for block in blocks: for op in list(block.operations): - if "constexpr" not in op.op_type: + if not op.op_type.startswith("constexpr_"): continue if hasattr(op, "weight_key"): hash_key = [op.op_type, op.weight_key] diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html index efe337411..db08c3110 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/const_elimination.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.const_elimination — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.const_elimination — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
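The switch above from a substring test to ``str.startswith`` narrows which ops the deduplication pass treats as constexpr ops. A tiny self-contained illustration (the third op type is hypothetical, invented only to show the difference):

```python
op_types = ["constexpr_lut_to_dense", "const", "custom_constexpr_wrapper"]

# The old substring test also matches the hypothetical wrapper op:
assert [t for t in op_types if "constexpr" in t] == [
    "constexpr_lut_to_dense",
    "custom_constexpr_wrapper",
]

# The prefix test matches only genuine constexpr_* ops:
assert [t for t in op_types if t.startswith("constexpr_")] == [
    "constexpr_lut_to_dense"
]
```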
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html index 963bcac5b..b9148de85 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dead_code_elimination.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.dead_code_elimination — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.dead_code_elimination — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html index ea72f60d3..d9655b656 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/dedup_op_and_var_names.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.dedup_op_and_var_names — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.dedup_op_and_var_names — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html index 463522c58..895c2e10a 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/expand_dynamic_linear.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.expand_dynamic_linear — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.expand_dynamic_linear — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html index 8d2af5ad0..1c5c7a000 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/fuse_reduce_mean.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.fuse_reduce_mean — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.fuse_reduce_mean — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html index 330f235d0..e5a8cc7ae 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/loop_invariant_elimination.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.loop_invariant_elimination — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.loop_invariant_elimination — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html index db5bbed87..1190508db 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/noop_elimination.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.noop_elimination — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.noop_elimination — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html index 1afde7378..643452449 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_redundant_ops.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.remove_redundant_ops — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.remove_redundant_ops — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html index 4cdb56575..563eeb76e 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/remove_symbolic_reshape.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.remove_symbolic_reshape — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.remove_symbolic_reshape — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html index cf9740a13..a722153cd 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/cleanup/topological_reorder.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.cleanup.topological_reorder — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.cleanup.topological_reorder — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html index 5b7e7dbd1..4f3535d82 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_activation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_activation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_activation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html index b17345473..14c6f4d4e 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_conv.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_conv — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_conv — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html index 6d9cdf391..a57e756f5 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_elementwise_binary.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_elementwise_binary — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html similarity index 90% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html index d79069397..e8571d24f 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_linear.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_linear — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_linear — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -287,21 +282,20 @@

    Source code for coremltools.converters.mil.mil.passes.defs.optimize_linearreturn add_op_candidate @staticmethod - def _transpose(v, before_op, name=None): + def _transpose(v, name=None): """ Transpose the last 2 dims. - ``v``: (Var, must be a tensor). - - ``before_op``: (Operation) The op right before the newly added ``transpose`` op. - ``name``: Name for the ``transpose`` op if provided. """ perm = list(range(v.rank)) perm[-2], perm[-1] = perm[-1], perm[-2] if name is None: - return mb.transpose(x=v, perm=perm, before_op=before_op) + return mb.transpose(x=v, perm=perm) else: - return mb.transpose(x=v, perm=perm, before_op=before_op, name=name) + return mb.transpose(x=v, perm=perm, name=name) def _try_to_transform(self, matmul_op, add_op, block): if matmul_op.x.val is None and matmul_op.y.val is None: @@ -353,33 +347,25 @@

    Source code for coremltools.converters.mil.mil.passes.defs.optimize_linearbias = -bias out_name = add_op.outputs[0].name - if x_is_weight: - # If transpose_x == transpose_weight == False: - # w*x = (x^T w^T)^T = linear(x^T, w)^T - x_transposed = ( - self._transpose(linear_x, before_op=matmul_op) if not transpose_x else linear_x - ) - w_no_transpose = ( - weight if not transpose_weight else self._transpose(weight, before_op=matmul_op) - ) - x = mb.linear(x=x_transposed, weight=w_no_transpose, bias=bias, before_op=matmul_op) - x = self._transpose(x, before_op=matmul_op, name=out_name) - else: - # If transpose_x == transpose_weight == False - # x*w = x*(w^T)^T = linear(x, w^T) - x_no_transpose = ( - self._transpose(linear_x, before_op=matmul_op) if transpose_x else linear_x - ) - w_transposed = ( - weight if transpose_weight else self._transpose(weight, before_op=matmul_op) - ) - x = mb.linear( - x=x_no_transpose, - weight=w_transposed, - bias=bias, - before_op=matmul_op, - name=out_name, - ) + with mb.set_before_op(matmul_op): + if x_is_weight: + # If transpose_x == transpose_weight == False: + # w*x = (x^T w^T)^T = linear(x^T, w)^T + x_transposed = self._transpose(linear_x) if not transpose_x else linear_x + w_no_transpose = weight if not transpose_weight else self._transpose(weight) + x = mb.linear(x=x_transposed, weight=w_no_transpose, bias=bias) + x = self._transpose(x, name=out_name) + else: + # If transpose_x == transpose_weight == False + # x*w = x*(w^T)^T = linear(x, w^T) + x_no_transpose = self._transpose(linear_x) if transpose_x else linear_x + w_transposed = weight if transpose_weight else self._transpose(weight) + x = mb.linear( + x=x_no_transpose, + weight=w_transposed, + bias=bias, + name=out_name, + ) if add_op.enclosing_block.try_replace_uses_of_var_after_op( anchor_op=add_op, diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html index 7d04b5c60..590f3ac57 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_normalization.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_normalization — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_normalization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html index 5f3e674b2..3cd28a3c4 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_quantization.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_quantization — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_quantization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html index 2c809439d..547ee4ab0 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_repeat_ops.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_repeat_ops — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html similarity index 97% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html index b87ccaf04..66374b383 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_state.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_state — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_state — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html similarity index 89% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html index 5bbda449a..49d2ba013 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/optimize_tensor_operation.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.optimize_tensor_operation — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -102,7 +97,9 @@

    Source code for coremltools.converters.mil.mil.passes.defs.optimize_tensor_o from coremltools.converters.mil._deployment_compatibility import AvailableTarget from coremltools.converters.mil.frontend._utils import value_at +from coremltools.converters.mil.mil import Block from coremltools.converters.mil.mil import Builder as mb +from coremltools.converters.mil.mil import Operation, Program from coremltools.converters.mil.mil.block import is_current_opset_version_compatible_with from coremltools.converters.mil.mil.passes.graph_pass import AbstractGraphPass from coremltools.converters.mil.mil.passes.helper import ( @@ -1044,6 +1041,120 @@

    Source code for coremltools.converters.mil.mil.passes.defs.optimize_tensor_o for op in list(block.operations): self._match_pattern(op, block)

+
+
+@register_pass(namespace="common")
+class fuse_stack_split(AbstractGraphPass):
+    """
+    Detect the pattern ``inputs -> stack -> split -> squeeze`` and fuse it into ``identity`` ops
+    when the ``stack`` and ``split`` cancel each other out.
+    Note that the ``identity`` ops can be further removed by ``noop_elimination``.
+
+    .. code-block::
+
+        Input:
+            %4 = stack([%1, %2, %3], axis=0)
+            %5, %6, %7 = split(%4, axis=0)
+            %8 = squeeze(%5, axes=[0])
+            %9 = squeeze(%6, axes=[0])
+            %10 = squeeze(%7, axes=[0])
+
+        Output:
+            %8 = identity(%1)
+            %9 = identity(%2)
+            %10 = identity(%3)
+    """
+
+    def apply(self, prog: Program) -> None:
+        for f in prog.functions.values():
+            self.fuse_stack_split_block(f)
+
+    @staticmethod
+    def _try_to_transform(block: Block, stack_op: Operation) -> None:
+        def _convert_axis_to_positive(axis, rank):
+            if axis < 0:
+                return axis + rank + 1
+            return axis
+
+        def _try_fuse_a_branch(values, rank, axis, split_op):
+            ops_to_remove = [split_op]
+
+            # check that the split op has the matching config
+            if _convert_axis_to_positive(split_op.axis.val, rank) != axis:
+                return
+
+            split_sizes = split_op.split_sizes
+            if split_sizes is not None:
+                if split_sizes.val.tolist() != [1] * len(values):
+                    return
+
+            num_splits = split_op.num_splits
+            if num_splits is not None:
+                if num_splits.val != len(values):
+                    return
+
+            # check that none of the split op's output vars is a block output
+            for op in ops_to_remove:
+                for v in op.outputs:
+                    if v in block.outputs:
+                        return
+
+            # check that the outputs of the split op feed only into squeeze ops
+            split_out_vars = split_op.outputs
+            vars_to_replace = []
+
+            for val in split_out_vars:
+                if len(val.child_ops) != 1 or val.child_ops[0].op_type != "squeeze":
+                    return
+
+                squeeze_op = val.child_ops[0]
+                if [
+                    _convert_axis_to_positive(val, rank) for val in squeeze_op.axes.val.tolist()
+                ] != [axis]:
+                    return
+
+                vars_to_replace.append(squeeze_op.outputs[0])
+                ops_to_remove.append(squeeze_op)
+
+            for _input, _var in zip(values, vars_to_replace):
+                new_var = mb.identity(x=_input, before_op=squeeze_op)
+                block.replace_uses_of_var_after_op(
+                    anchor_op=squeeze_op,
+                    old_var=_var,
+                    new_var=new_var,
+                )
+            block.remove_ops(ops_to_remove)
+
+        if stack_op.outputs[0] in block.outputs:
+            return
+
+        # get the params of the stack op
+        values = stack_op.values
+        rank = values[0].rank
+        axis = _convert_axis_to_positive(stack_op.axis.val, rank)
+
+        # go through the split child ops
+        for val in list(stack_op.outputs[0].child_ops):
+            if val.op_type == "split":
+                _try_fuse_a_branch(values, rank, axis, val)
+
+        # remove the stack op if its output is no longer consumed by any op
+        if len(stack_op.outputs[0].child_ops) == 0:
+            block.remove_ops([stack_op])
+
+
+    @block_context_manager
+    def fuse_stack_split_block(self, block: Block) -> None:
+        for op in list(block.operations):
+            for b in op.blocks:
+                self.fuse_stack_split_block(b)
+
+            if op.op_type != "stack":
+                continue
+
+            self._try_to_transform(block, op)
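An example of the pattern this pass targets. This is a minimal sketch; the toy shapes and input names are illustrative assumptions, and the pass-registry invocation is just one way to run a single pass on a pymil program:

```python
from coremltools.converters.mil import Builder as mb
from coremltools.converters.mil.mil.passes.pass_registry import PASS_REGISTRY

# A toy program exhibiting the inputs -> stack -> split -> squeeze pattern.
@mb.program(input_specs=[mb.TensorSpec(shape=(2, 3)), mb.TensorSpec(shape=(2, 3))])
def prog(a, b):
    stacked = mb.stack(values=[a, b], axis=0)           # shape (2, 2, 3)
    s0, s1 = mb.split(x=stacked, num_splits=2, axis=0)  # two (1, 2, 3) tensors
    out0 = mb.squeeze(x=s0, axes=[0])                   # recovers a
    out1 = mb.squeeze(x=s1, axes=[0])                   # recovers b
    return out0, out1

# The pass rewrites the cancelling ops into identity ops,
# which noop_elimination can then remove.
PASS_REGISTRY["common::fuse_stack_split"](prog)
print(prog)
```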
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html similarity index 98% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html index 6d99c2075..83c78c994 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/preprocess.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.preprocess — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.preprocess — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html similarity index 99% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html index 3b33c80df..bf12bec43 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/quantization.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.quantization — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.quantization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html b/docs/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html similarity index 95% rename from docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html rename to docs/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html index 806f35f10..ad468fc08 100644 --- a/docs/_build/html/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html +++ b/docs/_modules/coremltools/converters/mil/mil/passes/defs/symbol_transform.html @@ -1,11 +1,13 @@ + + - coremltools.converters.mil.mil.passes.defs.symbol_transform — coremltools API Reference 8.0b1 documentation + coremltools.converters.mil.mil.passes.defs.symbol_transform — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -350,6 +345,8 @@

    Source code for coremltools.converters.mil.mil.passes.defs.symbol_transform< and source_const_var.op.weight_key is not None ): target_const_var.op.weight_key = source_const_var.op.weight_key + if hasattr(source_const_var.op, "weight_id") and source_const_var.op.weight_id is not None: + target_const_var.op.weight_id = source_const_var.op.weight_id return target_const_var def apply(self, prog: Program) -> None: @@ -400,6 +397,12 @@

Source code for coremltools.converters.mil.mil.passes.defs.symbol_transform< assert ( source_input_var.op.op_type == "const" ), "Only const may be absent from context" + # Consts shared across functions should map to the same file value when lowering into milproto, + # so we assign a weight_id if one is not already present. + if source_input_var.op.weight_id is None: + source_input_var.op.weight_id = ( + f"const_{source_input_var.name}_weight_id" + ) context[source_input_var.name] = self._copy_construct_const_var( source_input_var ) @@ -427,19 +430,7 @@

    Source code for coremltools.converters.mil.mil.passes.defs.symbol_transform< ] ) - prog.add_function(target_function_name, target_function) - - # For some reason, if we run const_deduplication._deduplicate_const_across_functions here, - # the populated `const.weight_id` will get lost if we run pass pipeline afterwards, - # so we have no choice but to let user manually deduplicate after all passes are done - # TODO (rdar://131680531): Investigate why it happens & whether we can change this behavior - logger.warning( - "(If you are using ct.utils.materialize_dynamic_shape_mlmodel, " - "you are safe to ignore this warning message) " - "Weights are duplicated in each materialized new function, " - "so you may want to run const_deduplication._deduplicate_const_across_functions " - "on your pymil program before serialization to milproto" - )

    + prog.add_function(target_function_name, target_function)
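The `weight_id` assignment above is what lets consts copied into each materialized function share a single serialized blob, removing the need to run `const_deduplication._deduplicate_const_across_functions` by hand. A self-contained sketch of that rule, using a hypothetical stand-in class rather than the library's pymil types:

```python
# Hypothetical stand-in for a pymil const op, for illustration only.
class ConstOp:
    def __init__(self, name, weight_id=None):
        self.name = name
        self.weight_id = weight_id

def share_weight_id(const_op):
    # Mirrors the pass: assign a deterministic id when absent, so every
    # function's copy of the const maps to the same weight file value.
    if const_op.weight_id is None:
        const_op.weight_id = f"const_{const_op.name}_weight_id"
    return const_op.weight_id

main_copy = ConstOp("w1")
materialized_copy = ConstOp("w1")
assert share_weight_id(main_copy) == share_weight_id(materialized_copy)
```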
    diff --git a/docs/_build/html/_modules/coremltools/converters/sklearn/_converter.html b/docs/_modules/coremltools/converters/sklearn/_converter.html similarity index 96% rename from docs/_build/html/_modules/coremltools/converters/sklearn/_converter.html rename to docs/_modules/coremltools/converters/sklearn/_converter.html index 8a3c61ff7..470c246a1 100644 --- a/docs/_build/html/_modules/coremltools/converters/sklearn/_converter.html +++ b/docs/_modules/coremltools/converters/sklearn/_converter.html @@ -1,11 +1,13 @@ + + - coremltools.converters.sklearn._converter — coremltools API Reference 8.0b1 documentation + coremltools.converters.sklearn._converter — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/converters/xgboost/_tree.html b/docs/_modules/coremltools/converters/xgboost/_tree.html similarity index 94% rename from docs/_build/html/_modules/coremltools/converters/xgboost/_tree.html rename to docs/_modules/coremltools/converters/xgboost/_tree.html index 0fdf3a6b3..b77367f28 100644 --- a/docs/_build/html/_modules/coremltools/converters/xgboost/_tree.html +++ b/docs/_modules/coremltools/converters/xgboost/_tree.html @@ -1,11 +1,13 @@ + + - coremltools.converters.xgboost._tree — coremltools API Reference 8.0b1 documentation + coremltools.converters.xgboost._tree — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/_compiled_model.html b/docs/_modules/coremltools/models/_compiled_model.html similarity index 69% rename from docs/_build/html/_modules/coremltools/models/_compiled_model.html rename to docs/_modules/coremltools/models/_compiled_model.html index 3dcf17e10..d146fd31d 100644 --- a/docs/_build/html/_modules/coremltools/models/_compiled_model.html +++ b/docs/_modules/coremltools/models/_compiled_model.html @@ -1,11 +1,13 @@ + + - coremltools.models._compiled_model — coremltools API Reference 8.0b1 documentation + coremltools.models._compiled_model — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -100,14 +95,14 @@

    Source code for coremltools.models._compiled_model

    from os.path import expanduser as _expanduser from typing import Optional as _Optional +from typing import Type as _Type from coremltools import ComputeUnit as _ComputeUnit from coremltools.models.model import MLState as _MLState -from .model import ( - _verify_optimization_hint_input, - MLModel as _MLModel, -) +from .model import MLModel as _MLModel +from .model import MLModelAsset as _MLModelAsset +from .model import _verify_optimization_hint_input from .utils import _macos_version try: @@ -126,6 +121,7 @@

    Source code for coremltools.models._compiled_model

compute_units: _ComputeUnit, function_name: str, optimization_hints: _Optional[dict] = None, + asset: _Optional[_MLModelAsset] = None, ): if _macos_version() < (10, 13): raise Exception("Loading compiled Core ML models is only supported on macOS 10.13 or higher.") @@ -135,14 +131,18 @@

    Source code for coremltools.models._compiled_model

if not isinstance(path, str): raise TypeError('The "path" parameter must be of type "str".') + + if asset is not None and not isinstance(asset, _MLModelAsset): + raise TypeError('The "asset" parameter must be of type "MLModelAsset".') + if not isinstance(compute_units, _ComputeUnit): raise TypeError('The "compute_units" parameter must be of type: "coremltools.ComputeUnit".') + if not isinstance(function_name, str): raise TypeError('The "function_name" parameter must be of type "str".') _verify_optimization_hint_input(optimization_hints) -
    [docs] def __init__( @@ -151,6 +151,7 @@

    Source code for coremltools.models._compiled_model

    compute_units: _ComputeUnit = _ComputeUnit.ALL, function_name: _Optional[str] = None, optimization_hints: _Optional[dict] = None, + asset: _Optional[_MLModelAsset] = None, ): """ Loads a compiled Core ML model. @@ -174,6 +175,9 @@

    Source code for coremltools.models._compiled_model

    Keys are the names of the optimization hint, either 'reshapeFrequency' or 'specializationStrategy'. Values are enumeration values of type ``coremltools.ReshapeFrequency`` or ``coremltools.SpecializationStrategy``. + asset : MLModelAsset or None + The model asset. + Examples -------- .. sourcecode:: python @@ -188,23 +192,90 @@

    Source code for coremltools.models._compiled_model

    if function_name is None: function_name = "" - self._init_check(path, compute_units, function_name, optimization_hints) + self._init_check( + path=path, + asset=asset, + compute_units=compute_units, + function_name=function_name, + optimization_hints=optimization_hints, + ) + + path_or_asset = _expanduser(path) + if asset is not None: + path_or_asset = asset self.compute_unit = compute_units self.function_name = function_name + self.path_or_asset = path_or_asset if optimization_hints is not None: self.optimization_hints = optimization_hints.copy() else: self.optimization_hints = None - path = _expanduser(path) + asset_proxy = asset.__proxy__ if asset is not None else None if self.optimization_hints is not None: optimization_hints_str_vals = {k: v.name for k, v in self.optimization_hints.items()} else: optimization_hints_str_vals = {} + self._proxy = _MLModelProxy( + path, compute_units.name, function_name, optimization_hints_str_vals, asset_proxy + )
    + + +
+[docs] + @classmethod + def from_asset( + cls, + asset: _MLModelAsset, + compute_units: _ComputeUnit = _ComputeUnit.ALL, + function_name: _Optional[str] = None, + optimization_hints: _Optional[dict] = None, + ) -> _Type["CompiledMLModel"]: + """ + Creates a CompiledMLModel instance from an asset. + + Parameters + ---------- + asset : MLModelAsset + The model asset to create the compiled model from. + + compute_units : coremltools.ComputeUnit + An enum with the following possible values: + - ``coremltools.ComputeUnit.ALL``: Use all compute units available, including the + neural engine. + - ``coremltools.ComputeUnit.CPU_ONLY``: Limit the model to only use the CPU. + - ``coremltools.ComputeUnit.CPU_AND_GPU``: Use both the CPU and GPU, but not the + neural engine. + - ``coremltools.ComputeUnit.CPU_AND_NE``: Use both the CPU and neural engine, but + not the GPU. Available only for macOS >= 13.0. + + optimization_hints : dict or None + Keys are the names of the optimization hint, either 'reshapeFrequency' or 'specializationStrategy'. + Values are enumeration values of type ``coremltools.ReshapeFrequency`` or ``coremltools.SpecializationStrategy``. - self._proxy = _MLModelProxy(path, compute_units.name, function_name, optimization_hints_str_vals)
    + Returns + ------- + CompiledMLModel + An instance of ``CompiledMLModel`` loaded from the provided asset. + + Examples + -------- + .. sourcecode:: python + + my_model_asset = MLModelAsset.from_memory(spec_data) + my_compiled_model = CompiledMLModel.from_asset(my_model_asset) + y = my_compiled_model.predict({"x": 3}) + + """ + return cls( + asset=asset, + path="", + compute_units=compute_units, + function_name=function_name, + optimization_hints=optimization_hints, + )
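A hedged sketch of the asset-based loading path introduced above, assuming a compiled model already exists at the placeholder path `my_model.mlmodelc` and takes a scalar input named `x`:

```python
import coremltools as ct

# Build an asset from a compiled model directory, then load it.
asset = ct.models.model.MLModelAsset.from_path("my_model.mlmodelc")
compiled_model = ct.models.CompiledMLModel.from_asset(
    asset,
    compute_units=ct.ComputeUnit.CPU_ONLY,
)
prediction = compiled_model.predict({"x": 3})
```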
    diff --git a/docs/_build/html/_modules/coremltools/models/array_feature_extractor.html b/docs/_modules/coremltools/models/array_feature_extractor.html similarity index 94% rename from docs/_build/html/_modules/coremltools/models/array_feature_extractor.html rename to docs/_modules/coremltools/models/array_feature_extractor.html index 97d80b78c..dac2cb8fe 100644 --- a/docs/_build/html/_modules/coremltools/models/array_feature_extractor.html +++ b/docs/_modules/coremltools/models/array_feature_extractor.html @@ -1,11 +1,13 @@ + + - coremltools.models.array_feature_extractor — coremltools API Reference 8.0b1 documentation + coremltools.models.array_feature_extractor — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_modules/coremltools/models/compute_device.html b/docs/_modules/coremltools/models/compute_device.html new file mode 100644 index 000000000..63869cb09 --- /dev/null +++ b/docs/_modules/coremltools/models/compute_device.html @@ -0,0 +1,270 @@ + + + + + + + + coremltools.models.compute_device — coremltools API Reference 8.1 documentation + + + + + + + + + + + + + + + + + + + + + +
    + + +
    + +
    +
    +
    +
      +
    • + + +
    • +
    • +
    +
    +
    +
    +
    + +

    Source code for coremltools.models.compute_device

    +# Copyright (c) 2024, Apple Inc. All rights reserved.
    +#
    +# Use of this source code is governed by a BSD-3-clause license that can be
    +# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
    +
    +from abc import ABC as _ABC
    +from typing import List as _List
    +
    +from coremltools import _logger
    +
    +try:
    +    from ..libcoremlpython import _MLModelProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLModelProxy: {e}")
    +    _MLModelProxy = None
    +
    +try:
    +    from ..libcoremlpython import _MLCPUComputeDeviceProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLCPUComputeDeviceProxy: {e}")
    +    _MLCPUComputeDeviceProxy = None
    +
    +try:
    +    from ..libcoremlpython import _MLGPUComputeDeviceProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLGPUComputeDeviceProxy: {e}")
    +    _MLGPUComputeDeviceProxy = None
    +
    +try:
    +    from ..libcoremlpython import _MLNeuralEngineComputeDeviceProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLNeuralEngineComputeDeviceProxy: {e}")
    +    _MLNeuralEngineComputeDeviceProxy = None
    +
    +
    +
    +[docs] +class MLComputeDevice(_ABC): + """ + Represents a compute device. + + The represented device is capable of running machine learning computations and other tasks like + analysis and processing of images, sound, etc. + """ + +
    +[docs] + @classmethod + def get_all_compute_devices( + cls, + ) -> _List["MLComputeDevice"]: + """ + Returns the list of all of the compute devices that are accessible. + + Returns + ------- + List[MLComputeDevice] + The accessible compute devices. + + Examples + -------- + .. sourcecode:: python + + compute_devices = ( + coremltools.models.compute_device.MLComputeDevice.get_all_compute_devices() + ) + + """ + return _MLModelProxy.get_all_compute_devices()
    +
    + + + +
    +[docs] +class MLCPUComputeDevice(MLComputeDevice): + """ + Represents a CPU compute device. + """ + + def __init__(self, proxy): + if _MLCPUComputeDeviceProxy is None or not isinstance(proxy, _MLCPUComputeDeviceProxy): + raise TypeError("The proxy parameter must be of type _MLCPUComputeDeviceProxy.") + self.__proxy__ = proxy
    + + + +
    +[docs] +class MLGPUComputeDevice(MLComputeDevice): + """ + Represents a GPU compute device. + """ + + def __init__(self, proxy): + if _MLGPUComputeDeviceProxy is None or not isinstance(proxy, _MLGPUComputeDeviceProxy): + raise TypeError("The proxy parameter must be of type _MLGPUComputeDeviceProxy.") + self.__proxy__ = proxy
    + + + +
    +[docs] +class MLNeuralEngineComputeDevice(MLComputeDevice): + """ + Represents a Neural Engine compute device. + """ + + def __init__(self, proxy): + if _MLNeuralEngineComputeDeviceProxy is None or not isinstance( + proxy, _MLNeuralEngineComputeDeviceProxy + ): + raise TypeError( + "The proxy parameter must be of type _MLNeuralEngineComputeDeviceProxy." + ) + self.__proxy__ = proxy + + @property + def total_core_count(self) -> int: + """ + Get the total number of cores in the Neural Engine. + + Returns + ------- + int + The total number of cores in the Neural Engine. + + Examples + -------- + .. sourcecode:: python + + compute_devices = ( + coremltools.models.compute_device.MLComputeDevice.get_all_compute_devices() + ) + compute_devices = filter( + lambda compute_device: isinstance( + compute_device, coremltools.models.compute_device.MLNeuralEngineComputeDevice + ), + compute_devices, + ) + neural_engine_compute_device = next(compute_devices, None) + neural_engine_core_count = ( + neural_engine_compute_device.total_core_count + if neural_engine_compute_device is not None + else 0 + ) + + """ + return self.__proxy__.get_total_core_count()
    + +
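A small usage sketch for the classes above, assuming a macOS machine where the Core ML runtime proxies load successfully:

```python
from coremltools.models.compute_device import (
    MLComputeDevice,
    MLCPUComputeDevice,
    MLGPUComputeDevice,
    MLNeuralEngineComputeDevice,
)

# Enumerate the accessible compute devices and report what each one is.
for device in MLComputeDevice.get_all_compute_devices():
    if isinstance(device, MLNeuralEngineComputeDevice):
        print(f"Neural Engine ({device.total_core_count} cores)")
    elif isinstance(device, MLGPUComputeDevice):
        print("GPU")
    elif isinstance(device, MLCPUComputeDevice):
        print("CPU")
```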
    + +
    +
    + +
    +
    +
    +
    + + + + \ No newline at end of file diff --git a/docs/_modules/coremltools/models/compute_plan.html b/docs/_modules/coremltools/models/compute_plan.html new file mode 100644 index 000000000..f4e9ea481 --- /dev/null +++ b/docs/_modules/coremltools/models/compute_plan.html @@ -0,0 +1,630 @@ + + + + + + + + coremltools.models.compute_plan — coremltools API Reference 8.1 documentation + + + + + + + + + + + + + + + + + + + + + +
    + + +
    + +
    +
    +
    +
      +
    • + + +
    • +
    • +
    +
    +
    +
    +
    + +

    Source code for coremltools.models.compute_plan

    +#  Copyright (c) 2024, Apple Inc. All rights reserved.
    +#
    +#  Use of this source code is governed by a BSD-3-clause license that can be
    +#  found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
    +
    +
    +from dataclasses import dataclass as _dataclass
    +from typing import Any as _Any
    +from typing import Dict as _Dict
    +from typing import List as _List
    +from typing import Optional as _Optional
    +from typing import Tuple as _Tuple
    +
    +from coremltools import ComputeUnit as _ComputeUnit
    +from coremltools import _logger
    +
    +from .compute_device import MLComputeDevice as _MLComputeDevice
    +
    +try:
    +    from ..libcoremlpython import _MLModelProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLModelProxy: {e}")
    +    _MLModelProxy = None
    +
    +try:
    +    from ..libcoremlpython import _MLComputePlanProxy
    +except Exception as e:
    +    _logger.warning(f"Failed to load _MLComputePlanProxy: {e}")
    +    _MLComputePlanProxy = None
    +
    +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureNeuralNetworkLayer: + """ + Represents a layer in a neural network model structure. + + Attributes + ---------- + name : str + The name of the neural network layer. + + type : str + The type of the layer (e.g., 'Dense', 'Convolutional', etc.). + + input_names : List[str] + A list of names representing the inputs to this layer. + + output_names : List[str] + A list of names representing the outputs from this layer. + """ + name: str + type: str + input_names: _List[str] + output_names: _List[str] + __proxy__: _Any
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureNeuralNetwork: + """ + Represents the structure of a neural network model. + + Attributes + ---------- + layers : List[MLModelStructureNeuralNetworkLayer] + The list of layers in the neural network. + """ + layers: _List[MLModelStructureNeuralNetworkLayer]
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramValue: + """ + Represents the value of a constant in an ML Program. + """ + pass
    + + + +
+[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramBinding: + """ + Represents a binding between a name and a program value in an ML Program. + This is either a previously defined name of a variable or a constant value in the Program. + + Attributes + ---------- + name : Optional[str] + The name of the variable; it can be None. + + value : Optional[MLModelStructureProgramValue] + The constant value; it can be None. + """ + name: _Optional[str] + value: _Optional[MLModelStructureProgramValue]
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramArgument: + """ + Represents an argument in an ML Program. + + Attributes + ---------- + bindings : List[MLModelStructureProgramBinding] + The list of bindings. + """ + bindings: _List[MLModelStructureProgramBinding]
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramValueType: + """ + Represents the type of a value or a variable in an ML Program. + """ + pass
    + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramNamedValueType: + """ + Represents a parameter's name and type in an ML Program. + + Attributes + ---------- + name : str + The name of the parameter. + + type : MLModelStructureProgramValueType + The type of the parameter. + """ + name: str + type: MLModelStructureProgramValueType
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramOperation: + """ + Represents an operation in an ML Program. + + Attributes + ---------- + inputs : Dict[str, MLModelStructureProgramArgument] + The arguments to the Operation. + + operator_name : str + The name of the operator, e.g., "conv", "pool", "softmax", etc. + + outputs : List[MLModelStructureProgramNamedValueType] + The outputs of the Operation. + + blocks : List[MLModelStructureProgramBlock] + The list of nested blocks for loops and conditionals, e.g., a conditional block will have two entries here. + """ + inputs: _Dict[str, MLModelStructureProgramArgument] + operator_name: str + outputs: _List[MLModelStructureProgramNamedValueType] + blocks: _List["MLModelStructureProgramBlock"] + __proxy__: _Any
    + + + +
+[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramBlock: + """ + Represents a block in an ML Program. + + Attributes + ---------- + inputs : List[MLModelStructureProgramNamedValueType] + The named inputs to the block. + + operations : List[MLModelStructureProgramOperation] + The list of operations in the block. + + output_names : List[str] + The names of the block outputs. + """ + inputs: _List[MLModelStructureProgramNamedValueType] + operations: _List[MLModelStructureProgramOperation] + output_names: _List[str]
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgramFunction: + """ + Represents a function in an ML Program. + + Attributes + ---------- + inputs : List[MLModelStructureProgramNamedValueType] + The named inputs to the function. + + block : MLModelStructureProgramBlock + The active block in the function. + """ + + inputs: _List[MLModelStructureProgramNamedValueType] + block: MLModelStructureProgramBlock
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLModelStructureProgram: + """ + Represents the structure of an ML Program model. + + Attributes + ---------- + functions : List[MLModelStructureProgramFunction] + The functions in the program. + """ + functions: _List[MLModelStructureProgramFunction]
    + + + +
+[docs] +@_dataclass(frozen=True) +class MLModelStructurePipeline: + """ + Represents the structure of a pipeline model. + + Attributes + ---------- + sub_models : Tuple[str, MLModelStructure] + The sub-models in the pipeline; each entry pairs a sub-model name with its structure. + """ + sub_models: _Tuple[str, "MLModelStructure"]
    + + + +
+[docs] +@_dataclass(frozen=True) +class MLModelStructure: + """ + Represents the structure of a model. + + Attributes + ---------- + neuralnetwork : Optional[MLModelStructureNeuralNetwork] + The structure of a NeuralNetwork model, if the model is a NeuralNetwork; otherwise None. + + program : Optional[MLModelStructureProgram] + The structure of an ML Program model, if the model is an ML Program; otherwise None. + + pipeline : Optional[MLModelStructurePipeline] + The structure of a Pipeline model, if the model is a Pipeline; otherwise None. + """ + + neuralnetwork: _Optional[MLModelStructureNeuralNetwork] + program: _Optional[MLModelStructureProgram] + pipeline: _Optional[MLModelStructurePipeline] +
+[docs] + @classmethod + def load_from_path(cls, compiled_model_path: str) -> "MLModelStructure": + """ + Loads the structure of a compiled model. + + The path must be the location of the ``mlmodelc`` directory. + + Parameters + ---------- + compiled_model_path : str + The path to the compiled model. + + Returns + ------- + MLModelStructure + An instance of MLModelStructure. + + Examples + -------- + .. sourcecode:: python + + model_structure = coremltools.models.compute_plan.MLModelStructure.load_from_path( + model.get_compiled_path() + ) + + if model_structure.neuralnetwork is not None: + # Examine the NeuralNetwork model. + pass + elif model_structure.program is not None: + # Examine the ML Program model. + pass + elif model_structure.pipeline is not None: + # Examine the Pipeline model. + pass + else: + # The model type is something else. + pass + + """ + + if _MLModelProxy is None: + raise ValueError("MLModelStructure is not supported.") + + return _MLModelProxy.get_model_structure(compiled_model_path)
    +
    + + + +
+[docs] +@_dataclass(frozen=True) +class MLComputePlanDeviceUsage: + """ + Represents the anticipated compute devices that would be used for executing a layer/operation. + + Attributes + ---------- + preferred_compute_device : MLComputeDevice + The compute device that the framework prefers to execute the layer/operation. + + supported_compute_devices : List[MLComputeDevice] + The compute devices on which the framework can execute the layer/operation. + """ + + preferred_compute_device: _MLComputeDevice + supported_compute_devices: _List[_MLComputeDevice]
    + + + +
    +[docs] +@_dataclass(frozen=True) +class MLComputePlanCost: + """ + Represents the estimated cost of executing a layer/operation. + + Attributes + ---------- + weight : float + The estimated workload of executing the operation over the total model execution. The value is between [0.0, 1.0]. + """ + + weight: float
    + + +
    +[docs] +class MLComputePlan: + """ + Represents the plan for executing a model. + + The application can use the plan to estimate the necessary cost and + resources of the model before running the predictions. + """ + + def __init__(self, proxy): + if _MLComputePlanProxy is None or not isinstance(proxy, _MLComputePlanProxy): + raise TypeError("The proxy parameter must be of type _MLComputePlanProxy.") + self.__proxy__ = proxy + + @property + def model_structure(self) -> MLModelStructure: + """ + Returns the model structure. + """ + return self.__proxy__.model_structure + +
+[docs] + def get_compute_device_usage_for_mlprogram_operation( + self, + operation: MLModelStructureProgramOperation, + ) -> _Optional[MLComputePlanDeviceUsage]: + """ + Returns the anticipated compute device usage for executing an ML Program operation. + + Parameters + ---------- + operation : MLModelStructureProgramOperation + An ML Program operation. + + Returns + ------- + Optional[MLComputePlanDeviceUsage] + The anticipated compute devices that would be used for executing the operation or ``None`` if the usage couldn't be determined. + """ + return self.__proxy__.get_compute_device_usage_for_mlprogram_operation(operation)
    + + +
+[docs] + def get_compute_device_usage_for_neuralnetwork_layer( + self, + layer: MLModelStructureNeuralNetworkLayer, + ) -> _Optional[MLComputePlanDeviceUsage]: + """ + Returns the anticipated compute device usage for executing a NeuralNetwork layer. + + Parameters + ---------- + layer : MLModelStructureNeuralNetworkLayer + A NeuralNetwork layer. + + Returns + ------- + Optional[MLComputePlanDeviceUsage] + The anticipated compute devices that would be used for executing the layer or ``None`` if the usage couldn't be determined. + """ + return self.__proxy__.get_compute_device_usage_for_neuralnetwork_layer(layer)
    + + +
    +[docs] + def get_estimated_cost_for_mlprogram_operation( + self, + operation: MLModelStructureProgramOperation, + ) -> _Optional[MLComputePlanCost]: + """ + Returns the estimated cost of executing an ML Program operation. + + Parameters + ---------- + operation : MLModelStructureProgramOperation + An ML Program operation. + + Returns + ------- + Optional[MLComputePlanCost] + The estimated cost of executing the operation. + """ + return self.__proxy__.get_estimated_cost_for_mlprogram_operation(operation)
    + + +
+[docs] + @classmethod + def load_from_path( + cls, + path: str, + compute_units: _ComputeUnit = _ComputeUnit.ALL, + ) -> "MLComputePlan": + """ + Loads the compute plan of a compiled model. + + The path must be the location of the ``mlmodelc`` directory. + + Parameters + ---------- + path : str + The path to the compiled model. + + compute_units : coremltools.ComputeUnit + The compute units the plan is created for; defaults to ``coremltools.ComputeUnit.ALL``. + + Returns + ------- + The plan for executing the model. + + Examples + -------- + .. sourcecode:: python + + compute_plan = coremltools.models.compute_plan.MLComputePlan.load_from_path( + model.get_compiled_path() + ) + + if compute_plan.model_structure.program is None: + raise ValueError("Unexpected model type.") + + program = compute_plan.model_structure.program + mainFunction = program.functions["main"] + for operation in mainFunction.block.operations: + # Get the compute device usage for the operation. + compute_device_usage = ( + compute_plan.get_compute_device_usage_for_mlprogram_operation(operation) + ) + # Get the estimated cost of executing the operation. + estimated_cost = compute_plan.get_estimated_cost_for_mlprogram_operation(operation) + + """ + + if _MLModelProxy is None: + raise ValueError("MLComputePlan is not supported.") + + return _MLModelProxy.get_compute_plan(path, compute_units.name)
    +
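Building on the per-operation cost query above, a hedged sketch that aggregates the estimates to find the most expensive operations. The model path is a placeholder, and each `MLComputePlanCost.weight` is the operation's share of the total model execution:

```python
import coremltools as ct

plan = ct.models.compute_plan.MLComputePlan.load_from_path(
    path="my_model.mlmodelc",
    compute_units=ct.ComputeUnit.ALL,
)
if plan.model_structure.program is None:
    raise ValueError("Expected an ML Program model.")

# Rank operations by their estimated share of total execution cost.
main_function = plan.model_structure.program.functions["main"]
ranked = []
for operation in main_function.block.operations:
    cost = plan.get_estimated_cost_for_mlprogram_operation(operation)
    if cost is not None:
        ranked.append((cost.weight, operation.operator_name))
for weight, op_name in sorted(ranked, reverse=True)[:5]:
    print(f"{op_name}: {weight:.3f}")
```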
    + +
    + +
    +
    + +
    +
    +
    +
    + + + + \ No newline at end of file diff --git a/docs/_build/html/_modules/coremltools/models/feature_vectorizer.html b/docs/_modules/coremltools/models/feature_vectorizer.html similarity index 95% rename from docs/_build/html/_modules/coremltools/models/feature_vectorizer.html rename to docs/_modules/coremltools/models/feature_vectorizer.html index 37db2e590..15c3f49ea 100644 --- a/docs/_build/html/_modules/coremltools/models/feature_vectorizer.html +++ b/docs/_modules/coremltools/models/feature_vectorizer.html @@ -1,11 +1,13 @@ + + - coremltools.models.feature_vectorizer — coremltools API Reference 8.0b1 documentation + coremltools.models.feature_vectorizer — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/ml_program/compression_utils.html b/docs/_modules/coremltools/models/ml_program/compression_utils.html similarity index 96% rename from docs/_build/html/_modules/coremltools/models/ml_program/compression_utils.html rename to docs/_modules/coremltools/models/ml_program/compression_utils.html index cbfa1d19d..3f09de1b6 100644 --- a/docs/_build/html/_modules/coremltools/models/ml_program/compression_utils.html +++ b/docs/_modules/coremltools/models/ml_program/compression_utils.html @@ -1,11 +1,13 @@ + + - coremltools.models.ml_program.compression_utils — coremltools API Reference 8.0b1 documentation + coremltools.models.ml_program.compression_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/model.html b/docs/_modules/coremltools/models/model.html similarity index 91% rename from docs/_build/html/_modules/coremltools/models/model.html rename to docs/_modules/coremltools/models/model.html index 6018a1eb5..b331e7514 100644 --- a/docs/_build/html/_modules/coremltools/models/model.html +++ b/docs/_modules/coremltools/models/model.html @@ -1,11 +1,13 @@ + + - coremltools.models.model — coremltools API Reference 8.0b1 documentation + coremltools.models.model — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -105,18 +100,18 @@

    Source code for coremltools.models.model

     import tempfile as _tempfile
     import warnings as _warnings
     from copy import deepcopy as _deepcopy
    +from typing import Dict as _Dict
    +from typing import List as _List
     from typing import Optional as _Optional
     
     import numpy as _np
     import numpy as _numpy
     
    -from coremltools import (
    -    ComputeUnit as _ComputeUnit,
    -    _logger as logger,
    -    proto as _proto,
    -    SpecializationStrategy as _SpecializationStrategy,
    -    ReshapeFrequency as _ReshapeFrequency,
    -)
    +from coremltools import ComputeUnit as _ComputeUnit
    +from coremltools import ReshapeFrequency as _ReshapeFrequency
    +from coremltools import SpecializationStrategy as _SpecializationStrategy
    +from coremltools import _logger as logger
    +from coremltools import proto as _proto
     from coremltools._deps import _HAS_TF_1, _HAS_TF_2, _HAS_TORCH
     from coremltools.converters.mil.mil.program import Program as _Program
     from coremltools.converters.mil.mil.scope import ScopeSource as _ScopeSource
    @@ -153,6 +148,13 @@ 

    Source code for coremltools.models.model

         logger.warning(f"Failed to load _MLModelProxy: {e}")
         _MLModelProxy = None
     
    +
    +try:
    +    from ..libcoremlpython import _MLModelAssetProxy
    +except Exception as e:
    +    logger.warning(f"Failed to load _MLModelAssetProxy: {e}")
    +    _MLModelAssetProxy = None
    +
     _HAS_PIL = True
     try:
         from PIL import Image as _PIL_IMAGE
    @@ -202,6 +204,8 @@ 

    Source code for coremltools.models.model

     _METADATA_SOURCE = "com.github.apple.coremltools.source"
     _METADATA_SOURCE_DIALECT = "com.github.apple.coremltools.source_dialect"
     
    +from .compute_device import MLComputeDevice as _MLComputeDevice
    +
     
     def _verify_optimization_hint_input(optimization_hint_input: _Optional[dict] = None) -> None:
         """
    @@ -275,6 +279,72 @@ 

    Source code for coremltools.models.model

             self.__proxy__ = proxy
     
     
    +
    +[docs] +class MLModelAsset: + """ + A class representing a compiled model asset. + + It supports two initialization methods: + - From a compiled model directory: The directory should have a '.mlmodelc' extension. + - From memory: Allows direct initialization using in-memory model data. + """ + def __init__(self, proxy): + if _MLModelAssetProxy is None or not isinstance(proxy, _MLModelAssetProxy): + raise TypeError("The proxy parameter must be of type _MLModelAssetProxy.") + self.__proxy__ = proxy + +
    +[docs] + @classmethod + def from_path( + cls, + compiled_model_path: str, + ) -> "MLModelAsset": + """ + Create an MLModelAsset instance from a compiled model path. + + Parameters + ---------- + compiled_model_path : str + The file path to the compiled model. + + Returns + ------- + MLModelAsset + An instance of MLModelAsset created from the specified path. + """ + return _MLModelProxy.create_model_asset_from_path(compiled_model_path)
    + + +
+[docs] + @classmethod + def from_memory( + cls, + spec_data: bytes, + blob_mapping: _Dict[str, bytes] = {}, + ) -> "MLModelAsset": + """ + Create an MLModelAsset instance from in-memory data. + + Parameters + ---------- + spec_data : bytes + The specification data of the model. + + blob_mapping : Dict[str, bytes] + A dictionary with blob path as the key and blob data as the value. + + Returns + ------- + MLModelAsset + An instance of MLModelAsset created from the provided memory data. + """ + return _MLModelProxy.create_model_asset_from_memory(spec_data, blob_mapping)
    +
    + +
    [docs] class MLModel: @@ -581,7 +651,13 @@

    Source code for coremltools.models.model

     
                 try:
                     return (
    -                    _MLModelProxy(filename, compute_units.name, function_name, optimization_hints_str_vals),
    +                    _MLModelProxy(
    +                        filename,
    +                        compute_units.name,
    +                        function_name,
    +                        optimization_hints_str_vals,
    +                        None,
    +                    ),
                         specification,
                         None,
                     )
    @@ -731,6 +807,9 @@ 

    Source code for coremltools.models.model

             the compiled model to persist, you need to make a copy.
     
             """
    +        if self.__proxy__ is None:
    +            raise Exception("This model was not loaded or compiled with the Core ML Framework.")
    +
             return self.__proxy__.get_compiled_model_path()
    @@ -849,7 +928,9 @@

    Source code for coremltools.models.model

         @staticmethod
         def _check_predict_data(data):
             if type(data) not in (list, dict):
    -            raise TypeError("\"data\" parameter must be either a dict or list of dict.")
    +            raise TypeError(
    +                f'"data" parameter must be either a dict or list of dict, but got {type(data)}.'
    +            )
             if type(data) == list and not all(map(lambda x: type(x) == dict, data)):
                 raise TypeError("\"data\" list must contain only dictionaries")
     
    @@ -918,6 +999,8 @@ 

    Source code for coremltools.models.model

             """
             if not _is_macos() or _macos_version() < (15, 0):
                 raise Exception("State functionality is only supported on macOS 15+")
    +        if self.__proxy__ is None:
    +            raise Exception("This model was not loaded with the Core ML Framework. Cannot get state.")
     
             return MLState(self.__proxy__.newState())
    @@ -1057,7 +1140,33 @@

    Source code for coremltools.models.model

             for given_input_name, given_input in input_dict.items():
                 if given_input_name not in model_input_to_types:
                     continue
    -            input_dict[given_input_name] = convert(given_input)
    + input_dict[given_input_name] = convert(given_input) + +
+[docs] + @classmethod + def get_available_compute_devices(cls) -> _List[_MLComputeDevice]: + """ + The list of available compute devices for Core ML. + + Use this method to get the list of compute devices that MLModel's predict method can use. + + Some compute devices on the hardware are exclusive to domain-specific ML frameworks, such as Vision and SoundAnalysis, and + are not available to the Core ML framework. See also ``MLComputeDevice.get_all_compute_devices()``. + + Returns + ------- + The list of compute devices MLModel's predict method can use. + + Examples + -------- + .. sourcecode:: python + + compute_devices = coremltools.MLModel.get_available_compute_devices() + + """ + return _MLModelProxy.get_available_compute_devices()
    +
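One way to act on this query, sketched under the assumption of a placeholder model at `my_model.mlpackage`: pick the compute units for prediction based on what is actually available.

```python
import coremltools as ct
from coremltools.models.compute_device import MLGPUComputeDevice

devices = ct.models.MLModel.get_available_compute_devices()
has_gpu = any(isinstance(d, MLGPUComputeDevice) for d in devices)
units = ct.ComputeUnit.ALL if has_gpu else ct.ComputeUnit.CPU_ONLY
model = ct.models.MLModel("my_model.mlpackage", compute_units=units)
```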
    diff --git a/docs/_build/html/_modules/coremltools/models/nearest_neighbors/builder.html b/docs/_modules/coremltools/models/nearest_neighbors/builder.html similarity index 98% rename from docs/_build/html/_modules/coremltools/models/nearest_neighbors/builder.html rename to docs/_modules/coremltools/models/nearest_neighbors/builder.html index 99bba1867..4d9ff3616 100644 --- a/docs/_build/html/_modules/coremltools/models/nearest_neighbors/builder.html +++ b/docs/_modules/coremltools/models/nearest_neighbors/builder.html @@ -1,11 +1,13 @@ + + - coremltools.models.nearest_neighbors.builder — coremltools API Reference 8.0b1 documentation + coremltools.models.nearest_neighbors.builder — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -326,7 +321,7 @@

    Source code for coremltools.models.nearest_neighbors.builder

    """ return self.spec.description.metadata.license - @author.setter + @license.setter def license(self, license): """ Add a license for the KNearestNeighborsClassifier model. diff --git a/docs/_build/html/_modules/coremltools/models/neural_network/builder.html b/docs/_modules/coremltools/models/neural_network/builder.html similarity index 99% rename from docs/_build/html/_modules/coremltools/models/neural_network/builder.html rename to docs/_modules/coremltools/models/neural_network/builder.html index 0dbf80cc0..285798422 100644 --- a/docs/_build/html/_modules/coremltools/models/neural_network/builder.html +++ b/docs/_modules/coremltools/models/neural_network/builder.html @@ -1,11 +1,13 @@ + + - coremltools.models.neural_network.builder — coremltools API Reference 8.0b1 documentation + coremltools.models.neural_network.builder — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/neural_network/flexible_shape_utils.html b/docs/_modules/coremltools/models/neural_network/flexible_shape_utils.html similarity index 99% rename from docs/_build/html/_modules/coremltools/models/neural_network/flexible_shape_utils.html rename to docs/_modules/coremltools/models/neural_network/flexible_shape_utils.html index 1812b83d9..f23294a10 100644 --- a/docs/_build/html/_modules/coremltools/models/neural_network/flexible_shape_utils.html +++ b/docs/_modules/coremltools/models/neural_network/flexible_shape_utils.html @@ -1,11 +1,13 @@ + + - coremltools.models.neural_network.flexible_shape_utils — coremltools API Reference 8.0b1 documentation + coremltools.models.neural_network.flexible_shape_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/neural_network/quantization_utils.html b/docs/_modules/coremltools/models/neural_network/quantization_utils.html similarity index 99% rename from docs/_build/html/_modules/coremltools/models/neural_network/quantization_utils.html rename to docs/_modules/coremltools/models/neural_network/quantization_utils.html index a674511c2..e722527d7 100644 --- a/docs/_build/html/_modules/coremltools/models/neural_network/quantization_utils.html +++ b/docs/_modules/coremltools/models/neural_network/quantization_utils.html @@ -1,11 +1,13 @@ + + - coremltools.models.neural_network.quantization_utils — coremltools API Reference 8.0b1 documentation + coremltools.models.neural_network.quantization_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/neural_network/update_optimizer_utils.html b/docs/_modules/coremltools/models/neural_network/update_optimizer_utils.html similarity index 97% rename from docs/_build/html/_modules/coremltools/models/neural_network/update_optimizer_utils.html rename to docs/_modules/coremltools/models/neural_network/update_optimizer_utils.html index d9784b0d5..36c4d2acc 100644 --- a/docs/_build/html/_modules/coremltools/models/neural_network/update_optimizer_utils.html +++ b/docs/_modules/coremltools/models/neural_network/update_optimizer_utils.html @@ -1,11 +1,13 @@ + + - coremltools.models.neural_network.update_optimizer_utils — coremltools API Reference 8.0b1 documentation + coremltools.models.neural_network.update_optimizer_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/pipeline.html b/docs/_modules/coremltools/models/pipeline.html similarity index 98% rename from docs/_build/html/_modules/coremltools/models/pipeline.html rename to docs/_modules/coremltools/models/pipeline.html index 86f13f485..851d64eec 100644 --- a/docs/_build/html/_modules/coremltools/models/pipeline.html +++ b/docs/_modules/coremltools/models/pipeline.html @@ -1,11 +1,13 @@ + + - coremltools.models.pipeline — coremltools API Reference 8.0b1 documentation + coremltools.models.pipeline — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/tree_ensemble.html b/docs/_modules/coremltools/models/tree_ensemble.html similarity index 98% rename from docs/_build/html/_modules/coremltools/models/tree_ensemble.html rename to docs/_modules/coremltools/models/tree_ensemble.html index d1698bf42..008d471f4 100644 --- a/docs/_build/html/_modules/coremltools/models/tree_ensemble.html +++ b/docs/_modules/coremltools/models/tree_ensemble.html @@ -1,11 +1,13 @@ + + - coremltools.models.tree_ensemble — coremltools API Reference 8.0b1 documentation + coremltools.models.tree_ensemble — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/models/utils.html b/docs/_modules/coremltools/models/utils.html similarity index 87% rename from docs/_build/html/_modules/coremltools/models/utils.html rename to docs/_modules/coremltools/models/utils.html index c3dd6e7f2..a3275c93d 100644 --- a/docs/_build/html/_modules/coremltools/models/utils.html +++ b/docs/_modules/coremltools/models/utils.html @@ -1,11 +1,13 @@ + + - coremltools.models.utils — coremltools API Reference 8.0b1 documentation + coremltools.models.utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -101,7 +96,6 @@

    Source code for coremltools.models.utils

     """
     Utilities for the entire package.
     """
    -from collections import OrderedDict as _OrderedDict
     import copy as _copy
     import gc as _gc
     import math as _math
    @@ -111,21 +105,25 @@ 

    Source code for coremltools.models.utils

     import sys as _sys
     import tempfile as _tempfile
     import warnings as _warnings
    +from collections import OrderedDict as _OrderedDict
     from collections.abc import Iterable as _Iterable
    +from copy import deepcopy as _deepcopy
     from functools import lru_cache as _lru_cache
     from typing import Callable as _Callable
     from typing import Dict as _Dict
    +from typing import Iterable as _Iterable
     from typing import List as _List
     from typing import Optional as _Optional
     from typing import Tuple as _Tuple
    +from typing import Type as _Type
     from typing import Union as _Union
     
     import numpy as _np
     
     import coremltools as _ct
    -from coremltools import _logger
     from coremltools import _SPECIFICATION_VERSION_IOS_16, _SPECIFICATION_VERSION_IOS_18
     from coremltools import ComputeUnit as _ComputeUnit
    +from coremltools import _logger
     from coremltools import proto as _proto
     from coremltools.converters.mil import mil as _mil
     from coremltools.converters.mil.frontend.milproto import load as _milproto_to_pymil
    @@ -136,7 +134,9 @@ 

    Source code for coremltools.models.utils

         WeightRandomizer as _WeightRandomizer,
     )
     from coremltools.converters.mil.mil.passes.graph_pass import AbstractGraphPass as _AbstractGraphPass
    -from coremltools.converters.mil.mil.passes.helper import block_context_manager as _block_context_manager
    +from coremltools.converters.mil.mil.passes.helper import (
    +    block_context_manager as _block_context_manager,
    +)
     from coremltools.converters.mil.mil.passes.pass_pipeline import (
         PassPipelineManager as _PassPipelineManager,
     )
    @@ -1605,7 +1605,7 @@ 

    Source code for coremltools.models.utils

             self, model_path: str, src_function_name: str, target_function_name: str
         ) -> None:
             """
    -        Insert a ``src_function_name`` function from ``model_path`` as the 
    +        Insert a ``src_function_name`` function from ``model_path`` as the
             ``target_function_name`` function in the multifunction descriptor.
             """
             self._add_modelpath_to_cache(model_path)
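A hedged sketch of driving the `MultiFunctionDescriptor.add_function` API touched in the hunk above (the model paths and target function names are hypothetical):

```python
# Hedged sketch: merge two single-function models into one multifunction
# .mlpackage ("adapter_a.mlpackage" and friends are hypothetical names).
import coremltools as ct

desc = ct.utils.MultiFunctionDescriptor()
desc.add_function("adapter_a.mlpackage", "main", "adapter_a")
desc.add_function("adapter_b.mlpackage", "main", "adapter_b")
desc.default_function_name = "adapter_a"
ct.utils.save_multifunction(desc, "combined.mlpackage")
```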
    @@ -1793,6 +1793,7 @@ 

    Source code for coremltools.models.utils

             defaultFunctionName=default_function_name,
         )
         multifunction_prog.skip_all_passes = True
    +    multifunction_prog.export_as_multifunction = True
         mlmodel = _mil_convert(
             multifunction_prog,
             convert_to="mlprogram",
    @@ -1800,7 +1801,6 @@ 

    Source code for coremltools.models.utils

             specification_version=spec_version,
             compute_units=_ct.ComputeUnit.CPU_ONLY,
             model_description=model_description,
    -        export_multi_functions=True,
             skip_model_load=True,
         )
         mlmodel.save(destination_path)
    @@ -1915,30 +1915,27 @@

    Source code for coremltools.models.utils

         )
         _PassPipelineManager.apply_pipeline(dynamic_shape_prog, pass_pipeline)
     
    -    # Weights are duplicated in each materialized new function
    -    # By default, graph pass const_deduplication will not deduplicate across functions,
    -    # so we need to call it explicitly here
    -    const_deduplication_pass = _PASS_REGISTRY["common::const_deduplication"]
    -    const_deduplication_pass._deduplicate_const_across_functions(dynamic_shape_prog)
    -
    -    export_multi_functions = True
         # If source function is the only function in source model,
         # and source function is replaced with materialization,
         # and materialization does not create other functions,
    +    # and source function name is "main",
         # then we will end up with a unifunction model
    # Core ML distinguishes "unifunction model" and "multifunction model with only 1 function"
         if (
             len(dynamic_shape_prog.functions) == 1
             and len(function_name_to_materialization_map) == 1
             and source_function_name in function_name_to_materialization_map
    +        and source_function_name == "main"
         ):
    -        export_multi_functions = False
    +        dynamic_shape_prog.export_as_multifunction = False
    +    else:
    +        dynamic_shape_prog.export_as_multifunction = True
     
    # Multifunction is added in iOS 18, so
         # * if export multifunction, then specification version has to be iOS 18+
         # * else, specification version can be the same as original version
         specification_version = dynamic_shape_mlmodel._spec.specificationVersion
    -    if export_multi_functions:
    +    if dynamic_shape_prog.export_as_multifunction:
             specification_version = max(_ct.target.iOS18, specification_version)
     
         dynamic_shape_prog.skip_all_passes = True
    @@ -1948,7 +1945,6 @@ 

    Source code for coremltools.models.utils

             convert_to="mlprogram",
             specification_version=specification_version,
             compute_units=_ct.ComputeUnit.CPU_ONLY,
    -        export_multi_functions=export_multi_functions,
             skip_model_load=True,
         )
         materialized_mlmodel.save(destination_path)
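A hedged sketch of invoking the materialization utility above; the input name, shape, and the map format `{new_function_name: {input_name: shape}}` are assumptions:

```python
# Hedged sketch: materialize a fixed-shape "main" function from a dynamic-shape
# model; with a single materialized "main", the saved model stays unifunction.
import coremltools as ct

dynamic_shape_mlmodel = ct.models.MLModel("dynamic_model.mlpackage")
ct.utils.materialize_dynamic_shape_mlmodel(
    dynamic_shape_mlmodel,
    function_name_to_materialization_map={"main": {"input_ids": (1, 128)}},
    destination_path="materialized.mlpackage",
)
```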
    @@ -2059,9 +2055,10 @@

    Source code for coremltools.models.utils

             )
         """
         # We do the lazy import to prevent circular import
    -    from . import MLModel
         from coremltools.converters.mil.converter import mil_convert as _mil_convert
     
    +    from . import MLModel
    +
         def get_pymil_prog_and_spec_from_model(model):
     
             # get the model spec and weight directory
    @@ -2081,7 +2078,7 @@ 

    Source code for coremltools.models.utils

             )
             if len(prog.functions) > 1 or "main" not in prog.functions:
                 raise ValueError("'bisect_model' only support model with a single 'main' function.")
    -        
    +
             func = prog.functions["main"]
             func.operations = list(func.operations)
     
    @@ -2203,8 +2200,10 @@ 

    Source code for coremltools.models.utils

     ) -> None:
         """Verifies the end-to-end output correctness of full (original) model versus chunked models"""
         # lazy import avoids circular error
    -    from coremltools.converters.mil.testing_utils import random_gen_input_feature_type as random_gen_input_feature_type
         from coremltools.converters.mil.testing_utils import compute_snr_and_psnr
    +    from coremltools.converters.mil.testing_utils import (
    +        random_gen_input_feature_type as random_gen_input_feature_type,
    +    )
     
         def report_correctness(original_outputs: _np.ndarray, final_outputs: _np.ndarray, log_prefix: str):
             """ Report PSNR values across two compatible tensors.
    @@ -2229,7 +2228,7 @@ 

    Source code for coremltools.models.utils

                 )
             return final_psnr
     
    -    
    +
         # Generate inputs for first chunk and full model
         input_dict = {}
         for input_desc in full_model._spec.description.input:
    @@ -2404,6 +2403,214 @@ 

    Source code for coremltools.models.utils

         return prog
     
     
    +
    +[docs] +def change_input_output_tensor_type( + ml_model: "_ct.models.MLModel", + from_type: _proto.FeatureTypes_pb2.ArrayFeatureType, + to_type: _proto.FeatureTypes_pb2.ArrayFeatureType, + function_names: _Optional[_List[str]] = None, + input_names: _Optional[_List[str]] = None, + output_names: _Optional[_List[str]] = None, +) -> "_ct.models.model.MLModel": + """ + Change the tensor data types of Core ML model inputs / outputs. Supported types are FLOAT16, FLOAT32. + + Parameters + ---------- + ml_model: MLModel + A Core ML model that needs to change its input/output type. + Note: + - the original model is not modified, the model with updated types is returned as a new instance. + - only an mlProgram is supported (not pipelines, not neural networks). + + from_type: + The type that should be changed from. + + to_type: + The type that will be used instead of all the `from_type` type. + + function_names: + Optional list of function names where the input/output needs to be changed. If not specified, only the "main" + function will be updated. + + input_names: + Optional list of input names that should be updated (by default none of the inputs will be updated). + + output_names: + Optional list of output names that should be updated (by default all the outputs that match the `from_type` + type will be updated). + + Examples + -------- + .. sourcecode:: python + + from coremltools.models.model import MLModel + from coremltools.utils import change_input_output_tensor_type + from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType + + model = MLModel("my_model.mlpackage") + updated_model = change_input_output_tensor_type( + ml_model=model, + from_type=ArrayFeatureType.FLOAT32, + to_type=ArrayFeatureType.FLOAT16, + ) + updated_model.save("my_updated_model.mlpackage") + """ + # We do the lazy import to prevent circular import + from coremltools.converters.mil.converter import mil_convert as _mil_convert + + SUPPORTED_TYPES = ( + _proto.FeatureTypes_pb2.ArrayFeatureType.FLOAT16, + _proto.FeatureTypes_pb2.ArrayFeatureType.FLOAT32, + ) + + def _get_model_spec(model: _ct.models.MLModel) -> _proto.Model_pb2.Model: + if not isinstance(model, _ct.models.MLModel): + raise ValueError(f"input model must be of type ct.models.MLModel, actual type is {type(model)})") + model_spec = model.get_spec() + + model_type = model_spec.WhichOneof("Type") + if model_type != "mlProgram": + raise ValueError(f"input model must be an mlProgram, actual model type is {model_type}") + + return model_spec + + def _get_dtype(feature_type: _proto.FeatureTypes_pb2.ArrayFeatureType) -> _Type[_mil.types.double]: + if feature_type == _proto.FeatureTypes_pb2.ArrayFeatureType.FLOAT16: + return _mil.types.fp16 + if feature_type == _proto.FeatureTypes_pb2.ArrayFeatureType.FLOAT32: + return _mil.types.fp32 + raise ValueError(f"invalid feature type: {feature_type}, supported only FLOAT16, FLOAT32") + + def _sanitize_names(names: _Optional[_List[str]], desc_list: _Iterable, default: _List[str]) -> _List[str]: + if names is None: + names = default + return [x.name for x in desc_list if "*" in names or x.name in names] + + def _eligible_feature_desc( + feature_desc: _proto.Model_pb2.FeatureDescription, + names: _List[str], + data_type: _proto.FeatureTypes_pb2.ArrayFeatureType, + ) -> bool: + if feature_desc.name not in names: + _logger.debug(f"ignoring feature {feature_desc.name} as it's not in the list of required names {names}") + return False + + feature_type = feature_desc.type.WhichOneof("Type") + if feature_type != 
"multiArrayType": + _logger.debug(f"ignoring output {feature_desc.name} (type: {feature_type})") + return False + + feature_data_type = feature_desc.type.multiArrayType.dataType + if feature_data_type != data_type: + _logger.debug(f"ignoring output tensor {feature_desc.name} (data type: {feature_data_type})") + return False + + return True + + def _get_input_vars(var_name: str) -> _Iterable[_Tuple[_Optional[_mil.block.Function], _Optional[_mil.Var]]]: + for name in function_names: + func = prog.functions[name] + var = next(iter([v for k, v in func.inputs.items() if k == var_name]), None) + if var: + if func.opset_version < _ct.target.iOS16: + _logger.warning(f"upgrading opset_version for function {func.name} to iOS16") + func.opset_version = _ct.target.iOS16 + yield func, var + + def _get_output_vars(var_name: str) -> _Iterable[_Tuple[_Optional[_mil.block.Function], _Optional[_mil.Var]]]: + for name in function_names: + func = prog.functions[name] + var = next(iter([v for v in func.outputs if v.name == var_name]), None) + if var: + if func.opset_version < _ct.target.iOS16: + _logger.warning(f"upgrading opset_version for function {func.name} to iOS16") + func.opset_version = _ct.target.iOS16 + yield func, var + + def _cast_input_type( + feature_desc: _proto.Model_pb2.FeatureDescription, + feature_var: _mil.Var, + first_operation: _mil.Operation, + ) -> None: + with first_operation.enclosing_block: + from_dtype_str = f"fp{from_dtype.get_bitwidth()}" + var_name = feature_desc.name + f"_to_{from_dtype_str}" + x = _mb.cast(x=feature_var, dtype=from_dtype_str, name=var_name, before_op=first_operation) + x.op.enclosing_block.replace_uses_of_var_after_op( + anchor_op=x.op, + old_var=feature_var, + new_var=x, + ) + feature_desc.type.multiArrayType.dataType = to_type + feature_var._sym_type = _mil.types.tensor(to_dtype, feature_var.sym_type.get_shape()) + + def _cast_output_type(feature_desc: _proto.Model_pb2.FeatureDescription, feature_var: _mil.Var) -> None: + with feature_var.op.enclosing_block: + to_dtype_str = f"fp{to_dtype.get_bitwidth()}" + var_name = feature_desc.name + f"_to_{to_dtype_str}" + x = _mb.cast(x=feature_var, dtype=to_dtype_str, name=var_name) + x.op.enclosing_block.replace_uses_of_var_after_op( + anchor_op=x.op, + old_var=feature_var, + new_var=x, + ) + x.name = var_name + feature_desc.name = var_name + feature_desc.type.multiArrayType.dataType = to_type + + ml_model_spec = _get_model_spec(model=ml_model) + + if from_type not in SUPPORTED_TYPES: + raise ValueError(f"not supported from_type: must be an ArrayFeatureType of {SUPPORTED_TYPES}") + if to_type not in SUPPORTED_TYPES: + raise ValueError(f"not supported to_type: must be an ArrayFeatureType of {SUPPORTED_TYPES}") + + if from_type == to_type: + return _deepcopy(ml_model) + + from_dtype = _get_dtype(feature_type=from_type) + to_dtype = _get_dtype(feature_type=to_type) + + input_names = _sanitize_names(names=input_names, desc_list=ml_model_spec.description.input, default=[]) + output_names = _sanitize_names(names=output_names, desc_list=ml_model_spec.description.output, default=["*"]) + + prog = _milproto_to_pymil.load( + model_spec=ml_model_spec, + specification_version=ml_model_spec.specificationVersion, + file_weights_dir=ml_model.weights_dir, + ) + + if not function_names: + function_names = ["main"] + _logger.debug(f"functions: {function_names}") + for func_name in function_names: + if func_name not in prog.functions: + raise ValueError(f"function '{func_name}' not defined in the model") + + for desc_input in 
ml_model_spec.description.input: + if not _eligible_feature_desc(feature_desc=desc_input, names=input_names, data_type=from_type): + continue + for function, input_var in _get_input_vars(var_name=desc_input.name): + _cast_input_type(feature_desc=desc_input, feature_var=input_var, first_operation=function.operations[0]) + + for desc_output in ml_model_spec.description.output: + if not _eligible_feature_desc(feature_desc=desc_output, names=output_names, data_type=from_type): + continue + for function, output_var in _get_output_vars(var_name=desc_output.name): + _cast_output_type(feature_desc=desc_output, feature_var=output_var) + + model_opset_version = max(function.opset_version.value for function in prog.functions.values()) + return _mil_convert( + prog, + convert_to="mlprogram", + convert_from="milinternal", + specification_version=model_opset_version, + compute_units=ml_model.compute_unit, + model_description=ml_model_spec.description, + )
    +
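Beyond the docstring example above, a hedged sketch of the selective arguments (`"encoder"` and `"logits"` are hypothetical function and output names):

```python
# Hedged sketch: convert only a named output, and only in selected functions.
from coremltools.models.model import MLModel
from coremltools.proto.FeatureTypes_pb2 import ArrayFeatureType
from coremltools.utils import change_input_output_tensor_type

model = MLModel("my_model.mlpackage")
updated_model = change_input_output_tensor_type(
    ml_model=model,
    from_type=ArrayFeatureType.FLOAT32,
    to_type=ArrayFeatureType.FLOAT16,
    function_names=["main", "encoder"],  # defaults to ["main"] when omitted
    output_names=["logits"],             # only this FLOAT32 output is converted
)
```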
    diff --git a/docs/_build/html/_modules/coremltools/optimize/coreml/_config.html b/docs/_modules/coremltools/optimize/coreml/_config.html similarity index 99% rename from docs/_build/html/_modules/coremltools/optimize/coreml/_config.html rename to docs/_modules/coremltools/optimize/coreml/_config.html index 075b3697e..1ccb1ccb4 100644 --- a/docs/_build/html/_modules/coremltools/optimize/coreml/_config.html +++ b/docs/_modules/coremltools/optimize/coreml/_config.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.coreml._config — coremltools API Reference 8.0b1 documentation + coremltools.optimize.coreml._config — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -685,7 +680,7 @@

    Source code for coremltools.optimize.coreml._config

    nbits: int Number of bits per weight. Required for ``kmeans`` or ``uniform`` mode, but must not be set for ``unique`` or ``custom`` mode. A LUT would have - 2\ :sup:`nbits` entries, where `nbits` can be ``{1, 2, 3, 4, 6, 8}``. + 2\\ :sup:`nbits` entries, where `nbits` can be ``{1, 2, 3, 4, 6, 8}``. mode: str Determine how the LUT is constructed by specifying one of the following: diff --git a/docs/_build/html/_modules/coremltools/optimize/coreml/_post_training_quantization.html b/docs/_modules/coremltools/optimize/coreml/_post_training_quantization.html similarity index 98% rename from docs/_build/html/_modules/coremltools/optimize/coreml/_post_training_quantization.html rename to docs/_modules/coremltools/optimize/coreml/_post_training_quantization.html index 68023236c..ed1dd4bd5 100644 --- a/docs/_build/html/_modules/coremltools/optimize/coreml/_post_training_quantization.html +++ b/docs/_modules/coremltools/optimize/coreml/_post_training_quantization.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.coreml._post_training_quantization — coremltools API Reference 8.0b1 documentation + coremltools.optimize.coreml._post_training_quantization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -194,9 +189,9 @@

    Source code for coremltools.optimize.coreml._post_training_quantization

    Quantized weights are computed as follows: .. math:: - w_q = cast\_to\_8\_bit\_integer(w_r / s + cast\_to\_float(z)) + w_q = cast\\_to\\_8\\_bit\\_integer(w_r / s + cast\\_to\\_float(z)) - Note: :math:`cast\_to\_8\_bit\_integer` is the process of clipping the input to range ``[low, high]`` followed by rounding and casting to 8-bit integer. + Note: :math:`cast\\_to\\_8\\_bit\\_integer` is the process of clipping the input to range ``[low, high]`` followed by rounding and casting to 8-bit integer. In ``"linear"`` mode, ``s, z`` are computed by mapping the original float range ``[A, B]`` into the 8-bit integer range ``[-128, 127]`` or ``[0, 255]``. That is, you are solving the diff --git a/docs/_build/html/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html b/docs/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html similarity index 88% rename from docs/_build/html/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html rename to docs/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html index a3b849255..e0e5e52ff 100644 --- a/docs/_build/html/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html +++ b/docs/_modules/coremltools/optimize/coreml/experimental/_post_training_quantization.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.coreml.experimental._post_training_quantization — coremltools API Reference 8.0b1 documentation + coremltools.optimize.coreml.experimental._post_training_quantization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -102,6 +97,7 @@

    Source code for coremltools.optimize.coreml.experimental._post_training_quan from typing import Dict, List, Optional, Union import numpy as np +from tqdm import tqdm from coremltools import _SPECIFICATION_VERSION_IOS_17 from coremltools import _logger as logger @@ -125,6 +121,7 @@

    Source code for coremltools.optimize.coreml.experimental._post_training_quan mlmodel: _MLModel, config: _OptimizationConfig, sample_data: List[Dict[Optional[str], np.ndarray]], + calibration_op_group_size: int = -1, ): """ Utility function to convert a float precision MLModel of type ``mlprogram``, which uses @@ -152,6 +149,14 @@

Source code for coremltools.optimize.coreml.experimental._post_training_quan method for the mlmodel. More specifically, the input name needs to be specified in the data, unless it's a single-input model where the name will be auto-inferred. + calibration_op_group_size: int + While running inference during calibration, only `calibration_op_group_size` intermediate outputs are + appended to the outputs at a time. If the model is very large, the temporary model could end up with + thousands of outputs, which may cause it to hang during model loading. To work around this + issue, intermediate outputs are grouped into smaller groups, so that each temporary model only + has `calibration_op_group_size` outputs. By default (`calibration_op_group_size = -1`), the group size is equal to the + number of valid intermediate ops. + Returns ------- model: MLModel @@ -195,7 +200,7 @@
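A hedged sketch of this grouping from the caller's side, assuming the knob is forwarded through the public `linear_quantize_activations` entry point as the new signature suggests (the model path and sample data are hypothetical):

```python
# Hedged sketch: activation quantization with capped per-pass output groups.
import numpy as np
import coremltools as ct
from coremltools.optimize.coreml import OptimizationConfig
from coremltools.optimize.coreml.experimental import (
    OpActivationLinearQuantizerConfig,
    linear_quantize_activations,
)

mlmodel = ct.models.MLModel("my_model.mlpackage")
config = OptimizationConfig(
    global_config=OpActivationLinearQuantizerConfig(mode="linear_symmetric")
)
samples = [{"x": np.random.rand(1, 3, 224, 224).astype(np.float32)} for _ in range(16)]
compressed = linear_quantize_activations(
    mlmodel,
    config,
    sample_data=samples,
    calibration_op_group_size=256,  # each temporary model gets at most 256 outputs
)
```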

    Source code for coremltools.optimize.coreml.experimental._post_training_quan ### Apply four major graph passes in order. # Insert prefix quantize/dequantize pairs to valid patterns. - logger.info("Running compression pass linear_quantize_activations phase 1/4 ...") + logger.info("Running compression pass linear_quantize_activations phase 1/3 ...") linear_activation_quantizer = PASS_REGISTRY[ "compression::insert_prefix_quantize_dequantize_pair" ] @@ -203,6 +208,8 @@

    Source code for coremltools.optimize.coreml.experimental._post_training_quan config, fake_compression=False ) linear_activation_quantizer.set_options([PassOption("config", config)]) + activation_stats = _get_activation_calibration_stats(mlmodel, sample_data) + linear_activation_quantizer.set_options([PassOption("activation_stats", activation_stats)]) prog = _model_utils._apply_graph_pass( mlmodel, @@ -214,22 +221,15 @@

Source code for coremltools.optimize.coreml.experimental._post_training_quan ) # Insert suffix quantize/dequantize pairs to valid patterns. - logger.info("Running compression pass linear_quantize_activations phase 2/4 ...") + logger.info("Running compression pass linear_quantize_activations phase 2/3 ...") graph_pass = PASS_REGISTRY["compression::insert_suffix_quantize_dequantize_pair"] graph_pass.set_options([PassOption("config", config)]) - graph_pass(prog) - prog.validate() - - # Updating scale/zero_point in all quantize/dequantize ops calculated by calibration data. - logger.info("Running compression pass linear_quantize_activations phase 3/4 ...") - activation_stats = _get_activation_calibration_stats(mlmodel, sample_data) - graph_pass = PASS_REGISTRY["compression::update_quantize_dequantize"] graph_pass.set_options([PassOption("activation_stats", activation_stats)]) graph_pass(prog) prog.validate() # Re-use existing path to dedup quantize/dequantize operations. - logger.info("Running compression pass linear_quantize_activations phase 4/4 ...") + logger.info("Running compression pass linear_quantize_activations phase 3/3 ...") graph_pass = PASS_REGISTRY["common::dequantize_quantize_pair_elimination"] graph_pass(prog) prog.validate() @@ -303,7 +303,7 @@

    Source code for coremltools.optimize.coreml.experimental._post_training_quan def _adjust_concat_surrounding_activation_stats( - concat_op_info_list: List, activation_stats_dict: Dict[str, Dict[str, float]] + concat_op_info_list: List[List[str]], activation_stats_dict: Dict[str, Dict[str, float]] ) -> None: """ Adjust the activation calibration stats of inputs/outputs to the same concat ops to maximize hardware efficiency. @@ -353,7 +353,9 @@

Source code for coremltools.optimize.coreml.experimental._post_training_quan def _get_activation_calibration_stats( - fpmodel: _MLModel, sample_data: List[Dict[str, np.ndarray]] + fpmodel: _MLModel, + sample_data: List[Dict[str, np.ndarray]], + calibration_op_group_size: int = -1, ) -> Dict[str, Dict[str, float]]: """ Calibrate and store a dict of intermediate tensor stats. @@ -364,6 +366,13 @@

Source code for coremltools.optimize.coreml.experimental._post_training_quan Path to fp16/fp32 "model.mlpackage". (Expects the original mlmodel, not the one with quantize and dequant ops) sample_data: list[dict] Data for calibration. + calibration_op_group_size: int + While running inference during calibration, only `calibration_op_group_size` intermediate outputs are + appended to the outputs at a time. If the model is very large, the temporary model could end up with + thousands of outputs, which may cause it to hang during model loading. To work around this + issue, intermediate outputs are grouped into smaller groups, so that each temporary model only + has `calibration_op_group_size` outputs. By default (`calibration_op_group_size = -1`), the group size is equal to the + number of valid intermediate ops. Returns ------- @@ -378,8 +387,6 @@

    Source code for coremltools.optimize.coreml.experimental._post_training_quan "Running compression pass linear_quantize_activations: calibration may take a while ..." ) - analyzed = 0 - tried = 0 debugger = ModelDebugger(fpmodel) activation_stats_dict = defaultdict(dict) intermediate_output_names = debugger.get_intermediate_output_names( @@ -406,30 +413,17 @@

Source code for coremltools.optimize.coreml.experimental._post_training_quan intermediate_output_names.remove(intermediate_output_name) # Get data ranges for all intermediate outputs. - for data in sample_data: - tried += 1 - try: - debugger.step( - step_fn=ModelDebugger.check_intermediate_output, - inputs=data, - activation_stats_dict=activation_stats_dict, - intermediate_output_names=intermediate_output_names, - ) - analyzed += 1 - logger.warning( - "Running compression pass linear_quantize_activations: calibrating sample {}/{} succeeds.".format( - tried, len(sample_data) - ) - ) - - except Exception as e: - logger.error(e) - logger.error( - "Running compression pass linear_quantize_activations: calibrating sample {}/{} fails.".format( - tried, len(sample_data) - ) - ) - continue + for data in tqdm( + sample_data, + desc="Running compression pass linear_quantize_activations", + unit=" calibration samples", + ): + debugger.step( + inputs=data, + activation_stats_dict=activation_stats_dict, + intermediate_output_names=intermediate_output_names, + op_group_size=calibration_op_group_size, + ) # Handle a special case - concat ops. _adjust_concat_surrounding_activation_stats( diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/_utils/python_utils.html b/docs/_modules/coremltools/optimize/torch/_utils/python_utils.html similarity index 87% rename from docs/_build/html/_modules/coremltools/optimize/torch/_utils/python_utils.html rename to docs/_modules/coremltools/optimize/torch/_utils/python_utils.html index 0d9e60316..026609817 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/_utils/python_utils.html +++ b/docs/_modules/coremltools/optimize/torch/_utils/python_utils.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch._utils.python_utils — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch._utils.python_utils — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -124,41 +119,41 @@

    Source code for coremltools.optimize.torch._utils.python_utils

    REGISTRY = None @classmethod - def register(cls, name: str): + def register(cls, key: _Any): if cls.REGISTRY is None: cls.REGISTRY = _OrderedDict() def inner_wrapper(wrapped_obj): - if name in cls.REGISTRY: + if key in cls.REGISTRY: _logger.warning( - f"Name: {name} is already registered with object: {cls.REGISTRY[name].__name__} " + f"Key: {key} is already registered with object: {cls.REGISTRY[key].__name__} " f"in registry: {cls.__name__}" - f"Over-writing the name with new class: {wrapped_obj.__name__}" + f"Over-writing the key with new class: {wrapped_obj.__name__}" ) - cls.REGISTRY[name] = wrapped_obj + cls.REGISTRY[key] = wrapped_obj return wrapped_obj return inner_wrapper @classmethod - def _get_object(cls, name: str): - if name in cls.REGISTRY: - return cls.REGISTRY[name] + def _get_object(cls, key: _Any): + if key in cls.REGISTRY: + return cls.REGISTRY[key] raise NotImplementedError( - f"No object is registered with name: {name} in registry {cls.__name__}." + f"No object is registered with key: {key} in registry {cls.__name__}." ) class ClassRegistryMixin(RegistryMixin): @classmethod - def get_class(cls, name: str): - return cls._get_object(name) + def get_class(cls, key: _Any): + return cls._get_object(key) class FunctionRegistryMixin(RegistryMixin): @classmethod - def get_function(cls, name: str): - return cls._get_object(name) + def get_function(cls, key: _Any): + return cls._get_object(key) class DictableDataClass: diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html b/docs/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html similarity index 97% rename from docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html rename to docs/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html index 5f2eef99c..38e1c4790 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html +++ b/docs/_modules/coremltools/optimize/torch/layerwise_compression/algorithms.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.layerwise_compression.algorithms — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.layerwise_compression.algorithms — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
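A hedged sketch of the generalized registry above, which now accepts any hashable key such as a module class (`_LayerHandlers` and `_LinearHandler` are hypothetical names, and the private import path is an assumption based on this hunk):

```python
# Hedged sketch: register and fetch a handler keyed by a class instead of a str.
import torch

from coremltools.optimize.torch._utils.python_utils import ClassRegistryMixin

class _LayerHandlers(ClassRegistryMixin):
    pass

@_LayerHandlers.register(torch.nn.Linear)  # key is a class, not a string name
class _LinearHandler:
    pass

handler_cls = _LayerHandlers.get_class(torch.nn.Linear)  # -> _LinearHandler
```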
    - 8.0b1 -
    @@ -172,7 +167,7 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< weights are quantized with zero point as zero. When it is set to ``QuantizationScheme.affine``, zero point can be set anywhere in the range of values allowed for the quantized weight. Defaults to ``QuantizationScheme.symmetric``. - block_size (:obj:`int`): When ``block_size`` is specified, ``block_size`` number of values will share the same quantization + block_size (:obj:`int`): When ``block_size`` is specified, ``block_size`` number of values will share the same quantization parameters of scale, as well as the same zero point when applicable, across the input channel axis. Defaults to ``None``. enable_normal_float (:obj:`bool`): When ``True``, normal float format is used for quantization. It's only supported when ``weight_dtype`` is equal to ``int3`` and ``int4``. Defaults to ``False``. @@ -300,9 +295,9 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< def __attrs_post_init__(self): self.weight_n_bits = _get_n_bits_from_dtype(self.weight_dtype) self.weight_dtype = _maybe_convert_str_to_dtype(self.weight_dtype) - if self.weight_dtype not in [_torch.uint8, _torch.float32]: + if self.weight_dtype not in [_torch.uint8, _torch.float16, _torch.float32]: raise ValueError( - f"weight_dtype must be one of (torch.uint8, torch.float32) not {self.weight_dtype}" + f"weight_dtype must be one of (torch.uint8, torch.float16, torch.float32) not {self.weight_dtype}" ) @classmethod @@ -635,7 +630,7 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< """ A post-training compression algorithm based on the paper `SparseGPT: Massive Language Models Can be Accurately Pruned in One-Shot - <https://arxiv.org/pdf/2301.00774.pdf>`_ + <https://arxiv.org/pdf/2301.00774.pdf>`_. Args: layer (:obj:`torch.nn.Module`): Module to be compressed. @@ -677,7 +672,9 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< weight = self._layer.weight.data.clone() if isinstance(self._layer, _nn.Conv2d): weight = weight.flatten(1) - weight = weight.float() + + if self._config.weight_dtype in [_torch.float32, _torch.float16]: + weight = weight.to(self._config.weight_dtype) if self._quantizer is not None and not self._quantizer.ready(): self._quantizer.find_params(weight, weight=True) @@ -686,6 +683,7 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< tick = _time.time() hessian = self._hessian + del self._hessian dead = _torch.diag(hessian) == 0 hessian[dead, dead] = 1 @@ -701,6 +699,10 @@

    Source code for coremltools.optimize.torch.layerwise_compression.algorithms< hessian = _torch.linalg.cholesky(hessian, upper=True) hessian_inverse = hessian + # Hessian computation happens in float32, and _torch.linalg.cholesky does not support float16, so we cast here + if self._config.weight_dtype in [_torch.float32, _torch.float16]: + hessian_inverse = hessian_inverse.to(self._config.weight_dtype) + mask = None for i1 in range(0, self._columns, self._processing_group_size): diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html b/docs/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html similarity index 98% rename from docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html rename to docs/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html index 57179649f..a28625841 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html +++ b/docs/_modules/coremltools/optimize/torch/layerwise_compression/layerwise_compressor.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.layerwise_compression.layerwise_compressor — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.layerwise_compression.layerwise_compressor — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
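A hedged sketch of the half-precision path enabled above, where weights and the inverse Hessian are cast to `weight_dtype` after the float32 Cholesky (the toy model and calibration data are illustrative):

```python
# Hedged sketch: SparseGPT with the newly accepted float16 weight_dtype.
import torch
from coremltools.optimize.torch.layerwise_compression import (
    LayerwiseCompressor,
    LayerwiseCompressorConfig,
    ModuleSparseGPTConfig,
)

model = torch.nn.Sequential(torch.nn.Linear(64, 64), torch.nn.Linear(64, 8))
calibration_data = [torch.randn(1, 64) for _ in range(8)]

config = LayerwiseCompressorConfig(
    global_config=ModuleSparseGPTConfig(weight_dtype="float16", target_sparsity=0.5)
)
compressor = LayerwiseCompressor(model, config)
sparse_model = compressor.compress(calibration_data, device="cpu")
```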
    - 8.0b1 -
    @@ -167,18 +162,18 @@

    Source code for coremltools.optimize.torch.layerwise_compression.layerwise_c Args: layers (:obj:`list` of :py:class:`torch.nn.Module` or :obj:`str`): List of layers - to be compressed. When items in the list are :obj:`str`, the string can be a regex - or the exact name of the module. The layers listed should be immediate child modules - of the parent container :py:class:`torch.nn.Sequential` model, and they should be contiguous. + to be compressed. When items in the list are :obj:`str`, the string can be a regex + or the exact name of the module. The layers listed should be immediate child modules + of the parent container :py:class:`torch.nn.Sequential` model, and they should be contiguous. That is, the output of layer ``n`` should be the input to layer ``n+1``. global_config (:py:class:`ModuleGPTQConfig` or :py:class:`ModuleSparseGPTConfig`): Config to be applied globally to all supported modules. Missing values are chosen from the default config. module_type_configs (:obj:`dict` of :obj:`str` to :py:class:`ModuleGPTQConfig` or :py:class:`ModuleSparseGPTConfig`): - Module type configs applied to a specific module class, such as :py:class:`torch.nn.Linear`. + Module type configs applied to a specific module class, such as :py:class:`torch.nn.Linear`. The keys can be either strings or module classes. module_name_configs (:obj:`dict` of :obj:`str` to :py:class:`ModuleGPTQConfig` or :py:class:`ModuleSparseGPTConfig`): - Module-level configs applied to specific modules. The name of the module must either be a regex or - a fully qualified name that can be used to fetch it from the top level module using the + Module-level configs applied to specific modules. The name of the module must either be a regex or + a fully qualified name that can be used to fetch it from the top level module using the ``module.get_submodule(target)`` method. input_cacher (:obj:`str` or :py:class:`FirstLayerInputCacher`): Cacher object that caches inputs which are then fed to the first layer set up for compression. @@ -289,7 +284,7 @@

    Source code for coremltools.optimize.torch.layerwise_compression.layerwise_c class LayerwiseCompressor(_BaseDataCalibratedModelOptimizer): """ A post-training compression algorithm which compresses a sequential model layer by layer - by minimizing the quantization error while quantizing the weights. The implementation + by minimizing the quantization error while quantizing the weights. The implementation supports two variations of this algorithm: 1) `Generative Pre-Trained Transformer Quantization (GPTQ) <https://arxiv.org/pdf/2210.17323.pdf>`_ @@ -303,7 +298,7 @@

    Source code for coremltools.optimize.torch.layerwise_compression.layerwise_c Only sequential models are supported, where the output of one layer feeds into the input of the next layer. - For HuggingFace models, disable the ``use_cache`` config. This is used to speed up decoding, + For HuggingFace models, disable the ``use_cache`` config. This is used to speed up decoding, but to generalize forward pass for :py:class:`LayerwiseCompressor` algorithms across all model types, the behavior must be disabled. diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/optimization_config.html b/docs/_modules/coremltools/optimize/torch/optimization_config.html similarity index 97% rename from docs/_build/html/_modules/coremltools/optimize/torch/optimization_config.html rename to docs/_modules/coremltools/optimize/torch/optimization_config.html index 7e23538b2..8305ad283 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/optimization_config.html +++ b/docs/_modules/coremltools/optimize/torch/optimization_config.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.optimization_config — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.optimization_config — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettization_config.html b/docs/_modules/coremltools/optimize/torch/palettization/palettization_config.html similarity index 98% rename from docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettization_config.html rename to docs/_modules/coremltools/optimize/torch/palettization/palettization_config.html index 26ad34413..565ab9402 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettization_config.html +++ b/docs/_modules/coremltools/optimize/torch/palettization/palettization_config.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.palettization.palettization_config — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.palettization.palettization_config — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -126,6 +121,9 @@

    Source code for coremltools.optimize.torch.palettization.palettization_confi _deprecated_field, _validate_module_type_keys_factory, ) +from coremltools.optimize.torch.palettization._supported_modules import ( + DKMPalettizerModulesRegistry as _DKMPalettizerModulesRegistry, +) # Default advanced options for palettization DEFAULT_PALETTIZATION_ADVANCED_OPTIONS = { @@ -150,7 +148,7 @@

    Source code for coremltools.optimize.torch.palettization.palettization_confi "per_channel_scaling_factor_scheme": "min_max", "percentage_palett_enable": 1.0, "kmeans_batch_threshold": 4, - "kmeans_n_init": 100, + "kmeans_n_init": 10, "zero_threshold": 1e-7, "kmeans_error_bnd": 0.0, "channel_axis": 0, @@ -575,7 +573,9 @@

    Source code for coremltools.optimize.torch.palettization.palettization_confi validator=_validators.deep_mapping( key_validator=_validators.and_( _validators.instance_of((str, _Callable)), - _validate_module_type_keys_factory(list(DEFAULT_PALETTIZATION_SCHEME.keys())), + _validate_module_type_keys_factory( + list(_DKMPalettizerModulesRegistry.get_supported_modules()) + ), ), value_validator=_validate_dkm_config_type, mapping_validator=_validators.instance_of(dict), diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettizer.html b/docs/_modules/coremltools/optimize/torch/palettization/palettizer.html similarity index 82% rename from docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettizer.html rename to docs/_modules/coremltools/optimize/torch/palettization/palettizer.html index e20d73947..3aaa34844 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/palettizer.html +++ b/docs/_modules/coremltools/optimize/torch/palettization/palettizer.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.palettization.palettizer — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.palettization.palettizer — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -107,11 +102,17 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    from torch.ao.quantization import FakeQuantize as _FakeQuantize from coremltools.optimize.torch._typing import ParamsDict as _ParamsDict +from coremltools.optimize.torch._utils.joint_compression_utils import ( + is_pruner_prepared as _is_pruner_prepared, +) from coremltools.optimize.torch._utils.math_utils import rmse_error as _rmse_error from coremltools.optimize.torch._utils.metadata_utils import ( register_metadata_version as _register_metadata_version, ) from coremltools.optimize.torch._utils.torch_utils import get_eval_model as _get_eval_model +from coremltools.optimize.torch._utils.torch_utils import ( + normalize_fsdp_module_name as _normalize_fsdp_module_name, +) from coremltools.optimize.torch._utils.validation_utils import ( validate_param_config as _validate_param_config, ) @@ -120,13 +121,10 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    ) from coremltools.optimize.torch.base_model_optimizer import _Report from coremltools.optimize.torch.palettization._custom_conversion import ( - PALETTIZATION_CONVERT_DICT as _PALETTIZATION_CONVERT_DICT, -) -from coremltools.optimize.torch.palettization._supported_modules import ( - _get_palettization_qat_mappings, + get_conversion_custom_config_dict as _get_conversion_custom_config_dict, ) from coremltools.optimize.torch.palettization._supported_modules import ( - get_palettizable_parameters as _get_palettizable_parameters, + DKMPalettizerModulesRegistry as _DKMPalettizerModulesRegistry, ) from coremltools.optimize.torch.palettization.fake_palettize import FakePalettize as _FakePalettize from coremltools.optimize.torch.palettization.palettization_config import ( @@ -149,6 +147,20 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    pass +class _PalettizerLUTObserver(_torch.quantization.MovingAveragePerChannelMinMaxObserver): + """ + Subclassing _torch.quantization.MovingAveragePerChannelMinMaxObserver to add reset_parameters method + """ + def reset_parameters(self) -> None: + """ + FSDP expects reset_parameters method to initialize parameters/buffers in submodules + Initializing buffers used by the MovingAveragePerChannelMinMaxObserver + """ + self.min_val = _torch.empty(0) + self.max_val = _torch.empty(0) + self.eps = _torch.tensor([_torch.finfo(_torch.float32).eps]) + +
    [docs] class DKMPalettizer(Palettizer): @@ -201,7 +213,8 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    config = _DKMPalettizerConfig() if config is None else config super().__init__(model, config) self._milestones = {} - self._supported_modules = _get_palettization_qat_mappings() + self._palettization_module_mapping = _DKMPalettizerModulesRegistry.REGISTRY + self._supported_modules = _DKMPalettizerModulesRegistry.get_supported_modules() def _palettize_supported_modules(self): """ @@ -209,36 +222,45 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    """ for name, submodule in self._model.named_modules(remove_duplicate=True): config = self._config.get_module_config(name, submodule) - if type(submodule) in self._supported_modules: - if config is not None: + if isinstance(submodule, self._supported_modules): + palettizer_module = _DKMPalettizerModulesRegistry.get_palettizer_module(submodule) + if config is not None and palettizer_module is not None: submod_configs = config if isinstance(config, list) else [config] for submod_config in submod_configs: if all( param.numel() > submod_config.weight_threshold - for param, _ in _get_palettizable_parameters(submodule) + for param, _ in palettizer_module.get_palettizable_parameters(submodule) ): module_level_advanced_options = self._get_module_level_advanced_options( submodule, submod_config ) + default_config = _DEFAULT_PALETTIZATION_SCHEME[_nn.Linear] n_bits = ( submod_config.n_bits if submod_config.n_bits is not None - else _DEFAULT_PALETTIZATION_SCHEME[type(submodule)]["n_bits"] + else _DEFAULT_PALETTIZATION_SCHEME.get( + type(submodule), default_config + )["n_bits"] ) cluster_dim = ( submod_config.cluster_dim if submod_config.cluster_dim is not None - else _DEFAULT_PALETTIZATION_SCHEME[type(submodule)]["cluster_dim"] + else _DEFAULT_PALETTIZATION_SCHEME.get( + type(submodule), default_config + )["cluster_dim"] ) enable_per_channel_scale = ( submod_config.enable_per_channel_scale if submod_config.enable_per_channel_scale is not None - else _DEFAULT_PALETTIZATION_SCHEME[type(submodule)][ - "enable_per_channel_scale" - ] + else _DEFAULT_PALETTIZATION_SCHEME.get( + type(submodule), default_config + )["enable_per_channel_scale"] ) updated_config = None - for param, param_name in _get_palettizable_parameters(submodule): + for ( + param, + param_name, + ) in palettizer_module.get_palettizable_parameters(submodule): updated_config = _validate_param_config( name + "." + param_name, param, @@ -255,7 +277,7 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    if not updated_config: continue - self._palettize_module( + self._attach_qconfig( submodule, n_bits, cluster_dim, @@ -271,7 +293,7 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    self._milestones[name] = updated_config.milestone @staticmethod - def _palettize_module( + def _attach_qconfig( module: _nn.Module, n_bits: int, cluster_dim: int, @@ -285,11 +307,11 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    advanced_options: _Dict, ): """ - Method to palettize a module. + Attach :py:class:`QConfig` to a module so that it can be palettized. """ fq_activation = _nn.Identity fq_weight = _FakePalettize.with_args( - observer=_torch.quantization.MovingAveragePerChannelMinMaxObserver.with_args( + observer=_PalettizerLUTObserver.with_args( quant_min=quant_min, quant_max=quant_max, dtype=dtype ), n_bits=n_bits, @@ -303,7 +325,7 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    ) if quantize_activations: fq_activation = _FakeQuantize.with_args( - observer=_torch.quantization.MovingAveragePerChannelMinMaxObserver.with_args( + observer=_PalettizerLUTObserver.with_args( quant_min=quant_min, quant_max=quant_max, dtype=dtype ), quant_min=quant_min, @@ -317,13 +339,17 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    module: _nn.Module, module_level_config: _ModuleDKMPalettizerConfig ) -> _ParamsDict: """ - Returns advanced_options for a module. First checks whether the user specified something for those options in the - palettization_config. If not, uses the options from the DEFAULT_PALETTIZATION_SCHEME of that module type. - Returns false otherwise. + Returns advanced_options for a module. First checks whether the user specified something for + those options in the palettization_config. If not, uses the options from the + DEFAULT_PALETTIZATION_SCHEME of that module type. """ module_level_advanced_options = {} for key in _DEFAULT_PALETTIZATION_ADVANCED_OPTIONS.keys(): - if key == "cluster_permute" and module_level_config.lut_dtype == "oc_last": + if ( + key == "cluster_permute" + and module_level_config.cluster_dtype == "oc_last" + and hasattr(module, "weight") + ): cluster_permute = list(range(module.weight.dim())) cluster_permute = cluster_permute[1:] + cluster_permute[:1] module_level_advanced_options[key] = cluster_permute @@ -342,16 +368,27 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    inplace (:obj:`bool`): If ``True``, model transformations are carried out in-place and the original module is mutated, otherwise a copy of the model is mutated and returned. """ + if _is_pruner_prepared(self): + raise RuntimeError( + "Model has been prepared for pruning. When running joint compression, " + "first prepare quantizer/palettizer and then pruner, to ensure insertion of " + "fake quantization layers does not remove pruning forward hooks." + ) self._model = self._get_model_for_compression(inplace) self._model.train() self._palettize_supported_modules() - qat_mappings = _get_palettization_qat_mappings() self._model = _torch.quantization.prepare_qat( - self._model, mapping=qat_mappings, inplace=True + self._model, mapping=self._palettization_module_mapping, inplace=True ) + self._default_enable_fake_palett() return self._model
    + def _default_enable_fake_palett(self): + for name, module in self._model.named_modules(): + if name in self._milestones and self._milestones[name] == 0: + self._enable_fake_palett_impl(module, True) +
    [docs] def finalize(self, model: _Optional[_nn.Module] = None, inplace: bool = False) -> _nn.Module: @@ -372,7 +409,9 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    model.eval() finalized_model = _torch.quantization.convert( - model, convert_custom_config_dict=_PALETTIZATION_CONVERT_DICT, inplace=inplace + model, + convert_custom_config_dict=_get_conversion_custom_config_dict(), + inplace=inplace, ) if model is None: @@ -389,11 +428,14 @@

    Source code for coremltools.optimize.torch.palettization.palettizer

    is called is equal to ``milestone``, palettization is enabled. """ for name, module in self._model.named_modules(): - if name in self._milestones: - if self._step_count == self._milestones[name]: + # FSDP flattens model parameters leading to change in module names + # _normalize_fsdp_module_name function call to get original module name + normalized_name = _normalize_fsdp_module_name(name) + if normalized_name in self._milestones: + if self._step_count == self._milestones[normalized_name]: self._enable_fake_palett_impl(module, True) self._init_prune_threshold_and_module_wise_target_sparsity(module) - if self._step_count > self._milestones[name]: + if self._step_count > self._milestones[normalized_name]: self._update_prune_threshold(module) self._step_count += 1
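A hedged sketch of the preparation order the new `RuntimeError` above enforces for joint compression (the toy model and default configs are illustrative):

```python
# Hedged sketch: prepare the palettizer before the pruner so fake-palettization
# layers do not remove the pruning forward hooks.
import torch
from coremltools.optimize.torch.palettization import DKMPalettizer, DKMPalettizerConfig
from coremltools.optimize.torch.pruning import MagnitudePruner, MagnitudePrunerConfig

model = torch.nn.Sequential(
    torch.nn.Linear(128, 128), torch.nn.ReLU(), torch.nn.Linear(128, 10)
)

palettizer = DKMPalettizer(model, DKMPalettizerConfig())
model = palettizer.prepare(inplace=True)  # insert fake-palettization layers first

pruner = MagnitudePruner(model, MagnitudePrunerConfig())
model = pruner.prepare(inplace=True)      # then attach pruning forward hooks
```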
    diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html b/docs/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html similarity index 98% rename from docs/_build/html/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html rename to docs/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html index 55096b01c..d1d2875bf 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html +++ b/docs/_modules/coremltools/optimize/torch/palettization/post_training_palettization.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.palettization.post_training_palettization — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.palettization.post_training_palettization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -180,7 +175,7 @@

    Source code for coremltools.optimize.torch.palettization.post_training_palet table. The ``granularity`` is set to ``per_tensor``, and ``group_size`` is ``None``. 2. **Per-grouped-channel palettization**: In this configuration, the number of channels ``group_size`` along - ``channel_axis`` share the same lookup table. For example, for a weight matrix of shape ``(16, 25)``, if we provide + ``channel_axis`` share the same lookup table. For example, for a weight matrix of shape ``(16, 25)``, if we provide ``group_size = 8``, the shape of the lookup table would be ``(2, 2^n_bits)``. .. note:: diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html b/docs/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html similarity index 99% rename from docs/_build/html/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html rename to docs/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html index 05ea5c448..66758b15a 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html +++ b/docs/_modules/coremltools/optimize/torch/palettization/sensitive_k_means.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.palettization.sensitive_k_means — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.palettization.sensitive_k_means — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -193,7 +188,7 @@

    Source code for coremltools.optimize.torch.palettization.sensitive_k_means table. The ``granularity`` is set to ``per_tensor``, and ``group_size`` is ``None``. 2. **Per-grouped-channel palettization**: In this configuration, the number of channels ``group_size`` along - ``channel_axis`` share the same lookup table. For example, for a weight matrix of shape ``(16, 25)``, if we provide + ``channel_axis`` share the same lookup table. For example, for a weight matrix of shape ``(16, 25)``, if we provide ``group_size = 8``, the shape of the lookup table would be ``(2, 2^n_bits)``. .. note:: @@ -333,11 +328,11 @@

    Source code for coremltools.optimize.torch.palettization.sensitive_k_means Perform post-training palettization of weights by running a weighted k-means on the model weights. The weight values used for weighing different elements of a model's weight matrix are computed using the Fisher information matrix, which - is an approximation of the Hessian. These weight values indicate how sensitive - a given weight element is: the more sensitive an element, the larger the impact perturbing - or palettizing it has on the model’s loss function. This means that weighted k-means - moves the clusters closer to the sensitive weight values, allowing them to be - represented more exactly. This leads to a lower degradation in model performance + is an approximation of the Hessian. These weight values indicate how sensitive + a given weight element is: the more sensitive an element, the larger the impact perturbing + or palettizing it has on the model’s loss function. This means that weighted k-means + moves the clusters closer to the sensitive weight values, allowing them to be + represented more exactly. This leads to a lower degradation in model performance after palettization. The Fisher information matrix is computed using a few samples of calibration data. diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html b/docs/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html similarity index 98% rename from docs/_build/html/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html rename to docs/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html index 45adddecb..4279608d2 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html +++ b/docs/_modules/coremltools/optimize/torch/pruning/magnitude_pruner.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.pruning.magnitude_pruner — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.pruning.magnitude_pruner — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html b/docs/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html similarity index 97% rename from docs/_build/html/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html rename to docs/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html index 02ec8958f..c5bff4be9 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html +++ b/docs/_modules/coremltools/optimize/torch/pruning/pruning_scheduler.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.pruning.pruning_scheduler — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.pruning.pruning_scheduler — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html b/docs/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html similarity index 95% rename from docs/_build/html/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html rename to docs/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html index 941edb7fe..827de80bc 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html +++ b/docs/_modules/coremltools/optimize/torch/quantization/post_training_quantization.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.quantization.post_training_quantization — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.quantization.post_training_quantization — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
    - 8.0b1 -
    @@ -119,7 +114,7 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti from attrs import validators as _validators from coremltools.converters.mil.mil.ops.defs.iOS18 import constexpr_blockwise_shift_scale -from coremltools.optimize.coreml._utils import compute_qparams as _ct_compute_qparams +from coremltools.optimize.coreml._utils import compute_qparams as _cti_compute_qparams from coremltools.optimize.torch._utils.metadata_utils import ( CompressionMetadata as _CompressionMetadata, ) @@ -162,6 +157,20 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti _logger = _logging.getLogger(__name__) +_SUPPORTED_WEIGHT_DTYPE = [ + "int8", + "uint8", + "int4", + "uint4", + "fp8_e4m3", + "fp8_e5m2", + "float32", + _torch.int8, + _torch.uint8, + _torch.float8_e4m3fn, + _torch.float8_e5m2, + _torch.float32, +]
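With the expanded `_SUPPORTED_WEIGHT_DTYPE` list above, sub-byte and FP8 weight types can be requested by name; a hedged usage sketch (the config values are illustrative):

```python
from coremltools.optimize.torch.quantization import PostTrainingQuantizerConfig

# "int4" is accepted per _SUPPORTED_WEIGHT_DTYPE above; an unsupported
# string such as "int2" would raise the ValueError shown in
# __attrs_post_init__ instead.
config = PostTrainingQuantizerConfig.from_dict(
    {
        "global_config": {
            "weight_dtype": "int4",
            "granularity": "per_channel",
        }
    }
)
```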
    [docs] @@ -198,7 +207,7 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti all values in the tensor will share a single scale and, if applicable, a single zero point. The ``granularity`` argument is set to ``per_tensor``. - 3. **Per-block quantization**: This configuration is used to structure the tensor for blockwise quantization. The ``granularity`` + 3. **Per-block quantization**: This configuration is used to structure the tensor for blockwise quantization. The ``granularity`` is set to ``per_block``, and the ``block_size`` argument has to be specified. The ``block_size`` argument can either be of type ``int`` or ``tuple``: * int: In this configuration, each row along the output channel axis will have its own quantization parameters, similar to the ``per_channel`` configuration. @@ -217,11 +226,6 @@
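A hedged sketch of the `per_block` configuration described above (the dtype and `block_size` values are illustrative):

```python
from coremltools.optimize.torch.quantization import PostTrainingQuantizerConfig

# Per-block quantization: each row along the output channel axis is
# further divided into blocks of 32 elements along the input channel
# axis, and every block gets its own quantization parameters.
config = PostTrainingQuantizerConfig.from_dict(
    {
        "global_config": {
            "weight_dtype": "int4",
            "granularity": "per_block",
            "block_size": 32,
        }
    }
)
```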

    Source code for coremltools.optimize.torch.quantization.post_training_quanti """ weight_dtype: _Union[str, _torch.dtype] = _field( default=_default_ptq_options["weight_dtype"], - converter=_maybe_convert_str_to_dtype, - validator=[ - _validators.instance_of(_torch.dtype), - _validators.in_([_torch.int8, _torch.uint8, _torch.float32]), - ], ) granularity: QuantizationGranularity = _field( default=_default_ptq_options["granularity"], @@ -245,7 +249,12 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti ) def __attrs_post_init__(self): + if self.weight_dtype not in _SUPPORTED_WEIGHT_DTYPE: + raise ValueError( + f"weight_dtype must be one of {_SUPPORTED_WEIGHT_DTYPE} not {self.weight_dtype}" + ) self.weight_n_bits = _get_n_bits_from_dtype(self.weight_dtype) + self.weight_dtype = _maybe_convert_str_to_dtype(self.weight_dtype) @block_size.validator def per_block_granularity(self, attribute, value): @@ -359,7 +368,7 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti .. note:: After quantization, the weight values stored will still remain in full precision, so the PyTorch model size will not be reduced. To see the reduction in model size, please convert - the model using ``coremltools.convert(...)``, which will produce a model intermediate language + the model using ``coremltools.convert(...)``, which will produce a model intermediate language (MIL) model containing the compressed weights. Example: @@ -384,7 +393,7 @@

Source code for coremltools.optimize.torch.quantization.post_training_quanti ) # initialize the quantizer - config = PostTrainingQuantizerConfig.from_dict( + config = PostTrainingQuantizerConfig.from_dict( { "global_config": { "weight_dtype": "int8", @@ -448,7 +457,7 @@

    Source code for coremltools.optimize.torch.quantization.post_training_quanti Compute quantization parameters """ - ret = _ct_compute_qparams( + ret = _cti_compute_qparams( weight=weight, nbits=nbits, quantization_mode=quantization_mode, diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantization_config.html b/docs/_modules/coremltools/optimize/torch/quantization/quantization_config.html similarity index 96% rename from docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantization_config.html rename to docs/_modules/coremltools/optimize/torch/quantization/quantization_config.html index 3dc9dce65..2c5569d4a 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantization_config.html +++ b/docs/_modules/coremltools/optimize/torch/quantization/quantization_config.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.quantization.quantization_config — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.quantization.quantization_config — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
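For reference, the textbook affine quantization parameters that a helper like `compute_qparams` derives look as follows. This numpy sketch shows only the standard asymmetric formula and is not the coremltools implementation (which also takes a `quantization_mode` and other arguments):

```python
import numpy as np

def affine_qparams(weight: np.ndarray, nbits: int):
    """Textbook asymmetric quantization parameters (illustrative only).

    Assumes weight.max() > weight.min(), i.e. a non-constant tensor.
    """
    qmin, qmax = 0, 2 ** nbits - 1  # e.g. 0..255 for 8 bits
    w_min, w_max = weight.min(), weight.max()
    scale = (w_max - w_min) / (qmax - qmin)
    zero_point = np.round(qmin - w_min / scale)
    return scale, zero_point

scale, zp = affine_qparams(np.array([-1.0, 0.0, 2.0]), nbits=8)
print(scale, zp)  # ~0.0118, 85.0
```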
    - 8.0b1 -
    @@ -203,6 +198,16 @@

    Source code for coremltools.optimize.torch.quantization.quantization_config< # Backends only support 4 and 8 bit quantization _SUPPORTED_N_BITS = [4, 8, 32] +_SUPPORTED_WEIGHT_DTYPE = [ + "qint4", + "quint4", + "qint8", + "quint8", + "float32", + _torch.qint8, + _torch.quint8, + _torch.float32, +]
    @@ -339,12 +344,12 @@

    Source code for coremltools.optimize.torch.quantization.quantization_config< ) def __attrs_post_init__(self): - self.weight_n_bits = _get_n_bits_from_dtype(self.weight_dtype) - self.weight_dtype = _maybe_convert_str_to_dtype(self.weight_dtype) - if self.weight_dtype not in [_torch.qint8, _torch.quint8, _torch.float32]: + if self.weight_dtype not in _SUPPORTED_WEIGHT_DTYPE: raise ValueError( - f"weight_dtype must be one of (_torch.qint8, _torch.quint8, _torch.float32) not {self.weight_dtype}" + f"weight_dtype must be one of {_SUPPORTED_WEIGHT_DTYPE} not {self.weight_dtype}" ) + self.weight_n_bits = _get_n_bits_from_dtype(self.weight_dtype) + self.weight_dtype = _maybe_convert_str_to_dtype(self.weight_dtype) @milestones.validator def _check_milestones(self, attribute, value): diff --git a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantizer.html b/docs/_modules/coremltools/optimize/torch/quantization/quantizer.html similarity index 94% rename from docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantizer.html rename to docs/_modules/coremltools/optimize/torch/quantization/quantizer.html index 57357d734..8c750c9c7 100644 --- a/docs/_build/html/_modules/coremltools/optimize/torch/quantization/quantizer.html +++ b/docs/_modules/coremltools/optimize/torch/quantization/quantizer.html @@ -1,11 +1,13 @@ + + - coremltools.optimize.torch.quantization.quantizer — coremltools API Reference 8.0b1 documentation + coremltools.optimize.torch.quantization.quantizer — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
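With validation now running before dtype conversion, unsupported names fail fast with the ValueError above; a hedged usage sketch (the dtype strings are illustrative):

```python
from coremltools.optimize.torch.quantization import LinearQuantizerConfig

# "qint4" passes the membership check first, then is converted to a
# torch dtype while its bit width (4) is recorded as weight_n_bits.
config = LinearQuantizerConfig.from_dict(
    {"global_config": {"weight_dtype": "qint4"}}
)

# An unsupported name is rejected with the ValueError listing all
# supported dtypes, before any conversion helper runs.
try:
    LinearQuantizerConfig.from_dict({"global_config": {"weight_dtype": "int3"}})
except ValueError as err:
    print(err)
```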
    - 8.0b1 -
    @@ -112,6 +107,9 @@

    Source code for coremltools.optimize.torch.quantization.quantizer

    from torch.ao.quantization.quantize_fx import convert_to_reference_fx as _convert_to_reference_fx import coremltools.optimize.torch.quantization.modules.qat_modules as _qat +from coremltools.optimize.torch._utils.joint_compression_utils import ( + is_pruner_prepared as _is_pruner_prepared, +) from coremltools.optimize.torch._utils.math_utils import rmse_error as _rmse_error from coremltools.optimize.torch._utils.metadata_utils import ( register_metadata_version as _register_metadata_version, @@ -254,33 +252,26 @@

    Source code for coremltools.optimize.torch.quantization.quantizer

    return config return _ModuleLinearQuantizerConfig() -
    -[docs] - def prepare(self, example_inputs: _Tuple[_Any, ...], inplace: bool = False) -> _torch.nn.Module: + def _get_torch_ao_prepared_model( + self, example_inputs: _Tuple[_Any, ...], inplace: bool = False + ): """ + Helper method to prepare quantized model : Prepares the model for quantization aware training by inserting - :py:class:`torch.ao.quantization.FakeQuantize` layers in the model in appropriate places. - - Args: - example_inputs (:obj:`Tuple[Any, ...]`): Example inputs for forward function of the model, - tuple of positional args (keyword args can be passed as positional args as well) - inplace (:obj:`bool`): If ``True``, model transformations are carried out in-place and - the original module is mutated, otherwise a copy of the model is mutated and returned. - - .. note:: - This method uses `prepare_qat_fx method <https://pytorch.org/docs/stable/generated/torch.ao.quantization.quantize_fx.prepare_qat_fx.html#torch.ao.quantization.quantize_fx.prepare_qat_fx>`_ - to insert quantization layers and the returned model is a :py:class:`torch.fx.GraphModule`. - Some models, like those with dynamic control flow, may not be trace-able into a - :py:class:`torch.fx.GraphModule`. Please follow directions in `Limitations of Symbolic Tracing <https://pytorch.org/docs/stable/fx.html#limitations-of-symbolic-tracing>`_ - to update your model first before using :py:class:`LinearQuantizer` algorithm. - + :py:class:`torch.ao.quantization.FakeQuantize` layers in the model in appropriate places. """ if self._is_prepared: _logger.warning( - "Model has already been prepared for QAT. This API call " - "will be a no-op." + "Model has already been prepared for QAT. This API call " "will be a no-op." ) return self._model + + if _is_pruner_prepared(self): + raise RuntimeError( + "Model has been prepared for pruning. When running joint compression, " + "first prepare quantizer/palettizer and then pruner, to ensure insertion of " + "fake quantization layers does not remove pruning forward hooks." + ) model = self._get_model_for_compression(inplace=inplace) model.train() prepare_custom_config = _PrepareCustomConfig().set_non_traceable_module_names( @@ -295,7 +286,31 @@
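The RuntimeError above encodes an ordering rule for joint compression: prepare the quantizer (or palettizer) before the pruner, so that inserting FakeQuantize layers cannot drop the pruning forward hooks. A hedged sketch of that order (the toy model and default configs are illustrative):

```python
import torch

from coremltools.optimize.torch.pruning import MagnitudePruner, MagnitudePrunerConfig
from coremltools.optimize.torch.quantization import LinearQuantizer, LinearQuantizerConfig

model = torch.nn.Sequential(torch.nn.Conv2d(3, 8, 3), torch.nn.ReLU())
example_inputs = (torch.randn(1, 3, 16, 16),)

# Correct order for joint compression: prepare the quantizer first,
# which rewrites the graph and inserts FakeQuantize layers ...
quantizer = LinearQuantizer(model, LinearQuantizerConfig())
model = quantizer.prepare(example_inputs=example_inputs, inplace=True)

# ... then the pruner, so its forward hooks attach to the already
# rewritten graph and survive.
pruner = MagnitudePruner(model, MagnitudePrunerConfig())
model = pruner.prepare(inplace=True)
```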

    Source code for coremltools.optimize.torch.quantization.quantizer

    backend_config=_get_backend_config(), quantization_scheme=self._quantization_scheme, ) - prepared_model = qat_handler.prepare(model, example_inputs) + return qat_handler.prepare(model, example_inputs) + +
    +[docs] + def prepare(self, example_inputs: _Tuple[_Any, ...], inplace: bool = False) -> _torch.nn.Module: + """ + Prepares the model for quantization aware training by inserting + :py:class:`torch.ao.quantization.FakeQuantize` layers in the model in appropriate places. + + Args: + example_inputs (:obj:`Tuple[Any, ...]`): Example inputs for forward function of the model, + tuple of positional args (keyword args can be passed as positional args as well) + inplace (:obj:`bool`): If ``True``, model transformations are carried out in-place and + the original module is mutated, otherwise a copy of the model is mutated and returned. + + .. note:: + This method uses `prepare_qat_fx method <https://pytorch.org/docs/stable/generated/torch.ao.quantization.quantize_fx.prepare_qat_fx.html#torch.ao.quantization.quantize_fx.prepare_qat_fx>`_ + to insert quantization layers and the returned model is a :py:class:`torch.fx.GraphModule`. + Some models, like those with dynamic control flow, may not be trace-able into a + :py:class:`torch.fx.GraphModule`. Please follow directions in `Limitations of Symbolic Tracing <https://pytorch.org/docs/stable/fx.html#limitations-of-symbolic-tracing>`_ + to update your model first before using :py:class:`LinearQuantizer` algorithm. + + """ + + prepared_model = self._get_torch_ao_prepared_model(example_inputs, inplace) if self._milestones is not None: prepared_model.apply(_aoquant.disable_observer) prepared_model.apply(_aoquant.disable_fake_quant) diff --git a/docs/_build/html/_modules/index.html b/docs/_modules/index.html similarity index 96% rename from docs/_build/html/_modules/index.html rename to docs/_modules/index.html index 6c8028472..47d669dbb 100644 --- a/docs/_build/html/_modules/index.html +++ b/docs/_modules/index.html @@ -1,11 +1,13 @@ + + - Overview: module code — coremltools API Reference 8.0b1 documentation + Overview: module code — coremltools API Reference 8.1 documentation - + @@ -14,15 +16,11 @@ - - - - - - - + + + + + @@ -39,9 +37,6 @@ coremltools API Reference -
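The note above points at a real constraint of `prepare_qat_fx`: models with data-dependent control flow cannot be symbolically traced into a `torch.fx.GraphModule`. A self-contained illustration (the `DynamicBranch` module is made up):

```python
import torch

class DynamicBranch(torch.nn.Module):
    def forward(self, x):
        # Data-dependent control flow like this cannot be symbolically
        # traced, so LinearQuantizer.prepare() would fail on this model.
        if x.sum() > 0:
            return x * 2
        return x

try:
    torch.fx.symbolic_trace(DynamicBranch())
except torch.fx.proxy.TraceError as err:
    print(err)  # symbolically traced variables cannot be used as
                # inputs to control flow
```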
    - 8.0b1 -
    @@ -163,6 +158,8 @@

    All modules for which code is available

  • coremltools.converters.xgboost._tree
  • coremltools.models._compiled_model
  • coremltools.models.array_feature_extractor
  • +
  • coremltools.models.compute_device
  • +
  • coremltools.models.compute_plan
  • coremltools.models.feature_vectorizer
  • coremltools.models.ml_program.compression_utils
  • coremltools.models.model
  • diff --git a/docs/_build/html/_sources/_examples/dkm_palettization.rst.txt b/docs/_sources/_examples/dkm_palettization.rst.txt similarity index 100% rename from docs/_build/html/_sources/_examples/dkm_palettization.rst.txt rename to docs/_sources/_examples/dkm_palettization.rst.txt diff --git a/docs/_build/html/_sources/_examples/index.rst.txt b/docs/_sources/_examples/index.rst.txt similarity index 100% rename from docs/_build/html/_sources/_examples/index.rst.txt rename to docs/_sources/_examples/index.rst.txt diff --git a/docs/_build/html/_sources/_examples/linear_quantization.rst.txt b/docs/_sources/_examples/linear_quantization.rst.txt similarity index 100% rename from docs/_build/html/_sources/_examples/linear_quantization.rst.txt rename to docs/_sources/_examples/linear_quantization.rst.txt diff --git a/docs/_build/html/_sources/_examples/magnitude_pruning.rst.txt b/docs/_sources/_examples/magnitude_pruning.rst.txt similarity index 100% rename from docs/_build/html/_sources/_examples/magnitude_pruning.rst.txt rename to docs/_sources/_examples/magnitude_pruning.rst.txt diff --git a/docs/_build/html/_sources/_examples/sg_execution_times.rst.txt b/docs/_sources/_examples/sg_execution_times.rst.txt similarity index 100% rename from docs/_build/html/_sources/_examples/sg_execution_times.rst.txt rename to docs/_sources/_examples/sg_execution_times.rst.txt diff --git a/docs/_build/html/_sources/index.rst.txt b/docs/_sources/index.rst.txt similarity index 100% rename from docs/_build/html/_sources/index.rst.txt rename to docs/_sources/index.rst.txt diff --git a/docs/_build/html/_sources/sg_execution_times.rst.txt b/docs/_sources/sg_execution_times.rst.txt similarity index 100% rename from docs/_build/html/_sources/sg_execution_times.rst.txt rename to docs/_sources/sg_execution_times.rst.txt diff --git a/docs/_build/html/_sources/source/api-versions.rst.txt b/docs/_sources/source/api-versions.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/api-versions.rst.txt rename to docs/_sources/source/api-versions.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.convert.rst.txt b/docs/_sources/source/coremltools.converters.convert.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.convert.rst.txt rename to docs/_sources/source/coremltools.converters.convert.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.libsvm.rst.txt b/docs/_sources/source/coremltools.converters.libsvm.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.libsvm.rst.txt rename to docs/_sources/source/coremltools.converters.libsvm.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.mil.input_types.rst.txt b/docs/_sources/source/coremltools.converters.mil.input_types.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.mil.input_types.rst.txt rename to docs/_sources/source/coremltools.converters.mil.input_types.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.mil.mil.ops.defs.rst.txt b/docs/_sources/source/coremltools.converters.mil.mil.ops.defs.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.mil.mil.ops.defs.rst.txt rename to docs/_sources/source/coremltools.converters.mil.mil.ops.defs.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.mil.mil.passes.defs.rst.txt 
b/docs/_sources/source/coremltools.converters.mil.mil.passes.defs.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.mil.mil.passes.defs.rst.txt rename to docs/_sources/source/coremltools.converters.mil.mil.passes.defs.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.mil.rst.txt b/docs/_sources/source/coremltools.converters.mil.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.mil.rst.txt rename to docs/_sources/source/coremltools.converters.mil.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.rst.txt b/docs/_sources/source/coremltools.converters.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.rst.txt rename to docs/_sources/source/coremltools.converters.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.sklearn.rst.txt b/docs/_sources/source/coremltools.converters.sklearn.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.sklearn.rst.txt rename to docs/_sources/source/coremltools.converters.sklearn.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.converters.xgboost.rst.txt b/docs/_sources/source/coremltools.converters.xgboost.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.converters.xgboost.rst.txt rename to docs/_sources/source/coremltools.converters.xgboost.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.models.ml_program.rst.txt b/docs/_sources/source/coremltools.models.ml_program.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.models.ml_program.rst.txt rename to docs/_sources/source/coremltools.models.ml_program.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.models.neural_network.rst.txt b/docs/_sources/source/coremltools.models.neural_network.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.models.neural_network.rst.txt rename to docs/_sources/source/coremltools.models.neural_network.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.models.rst.txt b/docs/_sources/source/coremltools.models.rst.txt similarity index 86% rename from docs/_build/html/_sources/source/coremltools.models.rst.txt rename to docs/_sources/source/coremltools.models.rst.txt index d77a49d8d..c53a4deab 100644 --- a/docs/_build/html/_sources/source/coremltools.models.rst.txt +++ b/docs/_sources/source/coremltools.models.rst.txt @@ -80,3 +80,15 @@ utils .. automodule:: coremltools.models.utils :members: + +compute\_plan +------------------------------- + +.. automodule:: coremltools.models.compute_plan + :members: + +compute\_device +------------------------------- + +.. 
automodule:: coremltools.models.compute_device + :members: \ No newline at end of file diff --git a/docs/_build/html/_sources/source/coremltools.optimize.coreml.palettization.rst.txt b/docs/_sources/source/coremltools.optimize.coreml.palettization.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.coreml.palettization.rst.txt rename to docs/_sources/source/coremltools.optimize.coreml.palettization.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.coreml.post_training_quantization.rst.txt b/docs/_sources/source/coremltools.optimize.coreml.post_training_quantization.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.coreml.post_training_quantization.rst.txt rename to docs/_sources/source/coremltools.optimize.coreml.post_training_quantization.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.coreml.pruning.rst.txt b/docs/_sources/source/coremltools.optimize.coreml.pruning.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.coreml.pruning.rst.txt rename to docs/_sources/source/coremltools.optimize.coreml.pruning.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.coreml.quantization.rst.txt b/docs/_sources/source/coremltools.optimize.coreml.quantization.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.coreml.quantization.rst.txt rename to docs/_sources/source/coremltools.optimize.coreml.quantization.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.coreml.utilities.rst.txt b/docs/_sources/source/coremltools.optimize.coreml.utilities.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.coreml.utilities.rst.txt rename to docs/_sources/source/coremltools.optimize.coreml.utilities.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.rst.txt b/docs/_sources/source/coremltools.optimize.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.rst.txt rename to docs/_sources/source/coremltools.optimize.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.torch.examples.rst.txt b/docs/_sources/source/coremltools.optimize.torch.examples.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.torch.examples.rst.txt rename to docs/_sources/source/coremltools.optimize.torch.examples.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.torch.palettization.rst.txt b/docs/_sources/source/coremltools.optimize.torch.palettization.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.torch.palettization.rst.txt rename to docs/_sources/source/coremltools.optimize.torch.palettization.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.torch.pruning.rst.txt b/docs/_sources/source/coremltools.optimize.torch.pruning.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.torch.pruning.rst.txt rename to docs/_sources/source/coremltools.optimize.torch.pruning.rst.txt diff --git a/docs/_build/html/_sources/source/coremltools.optimize.torch.quantization.rst.txt b/docs/_sources/source/coremltools.optimize.torch.quantization.rst.txt similarity index 100% rename from docs/_build/html/_sources/source/coremltools.optimize.torch.quantization.rst.txt rename to 
docs/_sources/source/coremltools.optimize.torch.quantization.rst.txt diff --git a/docs/_build/html/_static/_sphinx_javascript_frameworks_compat.js b/docs/_static/_sphinx_javascript_frameworks_compat.js similarity index 100% rename from docs/_build/html/_static/_sphinx_javascript_frameworks_compat.js rename to docs/_static/_sphinx_javascript_frameworks_compat.js diff --git a/docs/_build/html/_static/basic.css b/docs/_static/basic.css similarity index 100% rename from docs/_build/html/_static/basic.css rename to docs/_static/basic.css diff --git a/docs/_build/html/_static/binder_badge_logo.svg b/docs/_static/binder_badge_logo.svg similarity index 100% rename from docs/_build/html/_static/binder_badge_logo.svg rename to docs/_static/binder_badge_logo.svg diff --git a/docs/_build/html/_static/broken_example.png b/docs/_static/broken_example.png similarity index 100% rename from docs/_build/html/_static/broken_example.png rename to docs/_static/broken_example.png diff --git a/docs/_build/html/_static/css/badge_only.css b/docs/_static/css/badge_only.css similarity index 76% rename from docs/_build/html/_static/css/badge_only.css rename to docs/_static/css/badge_only.css index c718cee44..88ba55b96 100644 --- a/docs/_build/html/_static/css/badge_only.css +++ b/docs/_static/css/badge_only.css @@ -1 +1 @@ -.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions 
.rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions 
dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions .rst-other-versions .rtd-current-item{font-weight:700}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}#flyout-search-form{padding:6px} \ No newline at end of file diff --git a/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff b/docs/_static/css/fonts/Roboto-Slab-Bold.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff rename to docs/_static/css/fonts/Roboto-Slab-Bold.woff diff --git a/docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2 b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/Roboto-Slab-Bold.woff2 rename to docs/_static/css/fonts/Roboto-Slab-Bold.woff2 diff --git a/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff b/docs/_static/css/fonts/Roboto-Slab-Regular.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff rename to docs/_static/css/fonts/Roboto-Slab-Regular.woff diff --git a/docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2 b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/Roboto-Slab-Regular.woff2 rename to docs/_static/css/fonts/Roboto-Slab-Regular.woff2 diff --git a/docs/_build/html/_static/css/fonts/fontawesome-webfont.eot b/docs/_static/css/fonts/fontawesome-webfont.eot similarity index 100% rename from docs/_build/html/_static/css/fonts/fontawesome-webfont.eot rename to docs/_static/css/fonts/fontawesome-webfont.eot diff --git a/docs/_build/html/_static/css/fonts/fontawesome-webfont.svg b/docs/_static/css/fonts/fontawesome-webfont.svg similarity index 100% rename from docs/_build/html/_static/css/fonts/fontawesome-webfont.svg rename to docs/_static/css/fonts/fontawesome-webfont.svg diff --git a/docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf b/docs/_static/css/fonts/fontawesome-webfont.ttf similarity index 100% rename from docs/_build/html/_static/css/fonts/fontawesome-webfont.ttf rename to docs/_static/css/fonts/fontawesome-webfont.ttf diff --git a/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff b/docs/_static/css/fonts/fontawesome-webfont.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/fontawesome-webfont.woff rename to docs/_static/css/fonts/fontawesome-webfont.woff diff --git a/docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2 b/docs/_static/css/fonts/fontawesome-webfont.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/fontawesome-webfont.woff2 rename to docs/_static/css/fonts/fontawesome-webfont.woff2 diff --git a/docs/_build/html/_static/css/fonts/lato-bold-italic.woff b/docs/_static/css/fonts/lato-bold-italic.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-bold-italic.woff rename to docs/_static/css/fonts/lato-bold-italic.woff 
diff --git a/docs/_build/html/_static/css/fonts/lato-bold-italic.woff2 b/docs/_static/css/fonts/lato-bold-italic.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-bold-italic.woff2 rename to docs/_static/css/fonts/lato-bold-italic.woff2 diff --git a/docs/_build/html/_static/css/fonts/lato-bold.woff b/docs/_static/css/fonts/lato-bold.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-bold.woff rename to docs/_static/css/fonts/lato-bold.woff diff --git a/docs/_build/html/_static/css/fonts/lato-bold.woff2 b/docs/_static/css/fonts/lato-bold.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-bold.woff2 rename to docs/_static/css/fonts/lato-bold.woff2 diff --git a/docs/_build/html/_static/css/fonts/lato-normal-italic.woff b/docs/_static/css/fonts/lato-normal-italic.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-normal-italic.woff rename to docs/_static/css/fonts/lato-normal-italic.woff diff --git a/docs/_build/html/_static/css/fonts/lato-normal-italic.woff2 b/docs/_static/css/fonts/lato-normal-italic.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-normal-italic.woff2 rename to docs/_static/css/fonts/lato-normal-italic.woff2 diff --git a/docs/_build/html/_static/css/fonts/lato-normal.woff b/docs/_static/css/fonts/lato-normal.woff similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-normal.woff rename to docs/_static/css/fonts/lato-normal.woff diff --git a/docs/_build/html/_static/css/fonts/lato-normal.woff2 b/docs/_static/css/fonts/lato-normal.woff2 similarity index 100% rename from docs/_build/html/_static/css/fonts/lato-normal.woff2 rename to docs/_static/css/fonts/lato-normal.woff2 diff --git a/docs/_build/html/_static/css/norightmargin.css b/docs/_static/css/norightmargin.css similarity index 100% rename from docs/_build/html/_static/css/norightmargin.css rename to docs/_static/css/norightmargin.css diff --git a/docs/_build/html/_static/css/theme.css b/docs/_static/css/theme.css similarity index 73% rename from docs/_build/html/_static/css/theme.css rename to docs/_static/css/theme.css index 19a446a0e..0f14f1064 100644 --- a/docs/_build/html/_static/css/theme.css +++ b/docs/_static/css/theme.css @@ -1,4 +1,4 @@ html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a 
button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) - */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content 
.code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 
[minified sphinx_rtd_theme stylesheet (likely docs-guides/_static/css/theme.css): FontAwesome 4.7 icon rules and Read the Docs theme styles; the icon `content` escape codes (e.g. "\f000") were stripped during extraction and are not reproduced here]
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search .wy-dropdown>aactive,.wy-side-nav-search .wy-dropdown>afocus,.wy-side-nav-search>a:hover,.wy-side-nav-search>aactive,.wy-side-nav-search>afocus{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon,.wy-side-nav-search>a.icon{display:block}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.switch-menus{position:relative;display:block;margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-side-nav-search>div.switch-menus>div.language-switch,.wy-side-nav-search>div.switch-menus>div.version-switch{display:inline-block;padding:.2em}.wy-side-nav-search>div.switch-menus>div.language-switch select,.wy-side-nav-search>div.switch-menus>div.version-switch select{display:inline-block;margin-right:-2rem;padding-right:2rem;max-width:240px;text-align-last:center;background:none;border:none;border-radius:0;box-shadow:none;font-family:Lato,proxima-nova,Helvetica 
Neue,Arial,sans-serif;font-size:1em;font-weight:400;color:hsla(0,0%,100%,.3);cursor:pointer;appearance:none;-webkit-appearance:none;-moz-appearance:none}.wy-side-nav-search>div.switch-menus>div.language-switch select:active,.wy-side-nav-search>div.switch-menus>div.language-switch select:focus,.wy-side-nav-search>div.switch-menus>div.language-switch select:hover,.wy-side-nav-search>div.switch-menus>div.version-switch select:active,.wy-side-nav-search>div.switch-menus>div.version-switch select:focus,.wy-side-nav-search>div.switch-menus>div.version-switch select:hover{background:hsla(0,0%,100%,.1);color:hsla(0,0%,100%,.5)}.wy-side-nav-search>div.switch-menus>div.language-switch select option,.wy-side-nav-search>div.switch-menus>div.version-switch select option{color:#000}.wy-side-nav-search>div.switch-menus>div.language-switch:has(>select):after,.wy-side-nav-search>div.switch-menus>div.version-switch:has(>select):after{display:inline-block;width:1.5em;height:100%;padding:.1em;content:"\f0d7";font-size:1em;line-height:1.2em;font-family:FontAwesome;text-align:center;pointer-events:none;box-sizing:border-box}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid 
#e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version 
button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions .rst-other-versions .rtd-current-item{font-weight:700}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}#flyout-search-form{padding:6px}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content 
pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso .last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul 
li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption 
.headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content .footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content 
div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content 
div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) 
format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/docs/_build/html/_static/doctools.js b/docs/_static/doctools.js similarity index 100% rename from docs/_build/html/_static/doctools.js rename to docs/_static/doctools.js diff --git a/docs/_build/html/_static/documentation_options.js b/docs/_static/documentation_options.js similarity index 93% rename from docs/_build/html/_static/documentation_options.js rename to docs/_static/documentation_options.js index a3a090557..c78f65793 100644 --- a/docs/_build/html/_static/documentation_options.js +++ b/docs/_static/documentation_options.js @@ -1,5 +1,5 @@ const DOCUMENTATION_OPTIONS = { - VERSION: '8.0b1', + VERSION: '8.1', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/docs/_build/html/_static/file.png b/docs/_static/file.png similarity index 100% rename from docs/_build/html/_static/file.png rename to docs/_static/file.png diff --git a/docs/_static/fonts/Lato/lato-bold.eot b/docs/_static/fonts/Lato/lato-bold.eot new file mode 100644 index 000000000..3361183a4 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bold.eot differ diff --git a/docs/_static/fonts/Lato/lato-bold.ttf b/docs/_static/fonts/Lato/lato-bold.ttf new file mode 100644 index 000000000..29f691d5e Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bold.ttf differ diff --git a/docs/_static/fonts/Lato/lato-bold.woff b/docs/_static/fonts/Lato/lato-bold.woff new file mode 100644 index 000000000..c6dff51f0 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bold.woff differ diff --git a/docs/_static/fonts/Lato/lato-bold.woff2 b/docs/_static/fonts/Lato/lato-bold.woff2 new file mode 100644 index 000000000..bb195043c Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bold.woff2 differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.eot b/docs/_static/fonts/Lato/lato-bolditalic.eot new file mode 100644 index 000000000..3d4154936 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bolditalic.eot differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.ttf b/docs/_static/fonts/Lato/lato-bolditalic.ttf new file mode 100644 index 000000000..f402040b3 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bolditalic.ttf differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.woff b/docs/_static/fonts/Lato/lato-bolditalic.woff new file mode 100644 index 000000000..88ad05b9f Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bolditalic.woff differ diff --git a/docs/_static/fonts/Lato/lato-bolditalic.woff2 b/docs/_static/fonts/Lato/lato-bolditalic.woff2 new file mode 100644 index 000000000..c4e3d804b Binary files /dev/null and b/docs/_static/fonts/Lato/lato-bolditalic.woff2 differ diff --git a/docs/_static/fonts/Lato/lato-italic.eot b/docs/_static/fonts/Lato/lato-italic.eot new file mode 100644 index 000000000..3f826421a Binary files /dev/null and 
b/docs/_static/fonts/Lato/lato-italic.eot differ diff --git a/docs/_static/fonts/Lato/lato-italic.ttf b/docs/_static/fonts/Lato/lato-italic.ttf new file mode 100644 index 000000000..b4bfc9b24 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-italic.ttf differ diff --git a/docs/_static/fonts/Lato/lato-italic.woff b/docs/_static/fonts/Lato/lato-italic.woff new file mode 100644 index 000000000..76114bc03 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-italic.woff differ diff --git a/docs/_static/fonts/Lato/lato-italic.woff2 b/docs/_static/fonts/Lato/lato-italic.woff2 new file mode 100644 index 000000000..3404f37e2 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-italic.woff2 differ diff --git a/docs/_static/fonts/Lato/lato-regular.eot b/docs/_static/fonts/Lato/lato-regular.eot new file mode 100644 index 000000000..11e3f2a5f Binary files /dev/null and b/docs/_static/fonts/Lato/lato-regular.eot differ diff --git a/docs/_static/fonts/Lato/lato-regular.ttf b/docs/_static/fonts/Lato/lato-regular.ttf new file mode 100644 index 000000000..74decd9eb Binary files /dev/null and b/docs/_static/fonts/Lato/lato-regular.ttf differ diff --git a/docs/_static/fonts/Lato/lato-regular.woff b/docs/_static/fonts/Lato/lato-regular.woff new file mode 100644 index 000000000..ae1307ff5 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-regular.woff differ diff --git a/docs/_static/fonts/Lato/lato-regular.woff2 b/docs/_static/fonts/Lato/lato-regular.woff2 new file mode 100644 index 000000000..3bf984332 Binary files /dev/null and b/docs/_static/fonts/Lato/lato-regular.woff2 differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot new file mode 100644 index 000000000..79dc8efed Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf new file mode 100644 index 000000000..df5d1df27 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff new file mode 100644 index 000000000..6cb600001 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 new file mode 100644 index 000000000..7059e2314 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot new file mode 100644 index 000000000..2f7ca78a1 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf new file mode 100644 index 000000000..eb52a7907 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff new file mode 100644 index 000000000..f815f63f9 Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff differ diff --git a/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 
b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 new file mode 100644 index 000000000..f2c76e5bd Binary files /dev/null and b/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 differ diff --git a/docs/_build/html/_static/graphviz.css b/docs/_static/graphviz.css similarity index 100% rename from docs/_build/html/_static/graphviz.css rename to docs/_static/graphviz.css diff --git a/docs/_build/html/_static/jquery.js b/docs/_static/jquery.js similarity index 100% rename from docs/_build/html/_static/jquery.js rename to docs/_static/jquery.js diff --git a/docs/_build/html/_static/js/badge_only.js b/docs/_static/js/badge_only.js similarity index 100% rename from docs/_build/html/_static/js/badge_only.js rename to docs/_static/js/badge_only.js diff --git a/docs/_build/html/_static/js/theme.js b/docs/_static/js/theme.js similarity index 100% rename from docs/_build/html/_static/js/theme.js rename to docs/_static/js/theme.js diff --git a/docs/_static/js/versions.js b/docs/_static/js/versions.js new file mode 100644 index 000000000..4958195e0 --- /dev/null +++ b/docs/_static/js/versions.js @@ -0,0 +1,228 @@ +const themeFlyoutDisplay = "hidden"; +const themeVersionSelector = true; +const themeLanguageSelector = true; + +if (themeFlyoutDisplay === "attached") { + function renderLanguages(config) { + if (!config.projects.translations.length) { + return ""; + } + + // Insert the current language to the options on the selector + let languages = config.projects.translations.concat(config.projects.current); + languages = languages.sort((a, b) => a.language.name.localeCompare(b.language.name)); + + const languagesHTML = ` +
+      <dl>
+        <dt>Languages</dt>
+        ${languages
+          .map(
+            (translation) => `
+        <dd ${translation.slug == config.projects.current.slug ? 'class="rtd-current-item"' : ""}>
+          <a href="${translation.urls.documentation}">${translation.language.code}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return languagesHTML;
+  }
+
+  function renderVersions(config) {
+    if (!config.versions.active.length) {
+      return "";
+    }
+    const versionsHTML = `
+      <dl>
+        <dt>Versions</dt>
+        ${config.versions.active
+          .map(
+            (version) => `
+        <dd ${version.slug === config.versions.current.slug ? 'class="rtd-current-item"' : ""}>
+          <a href="${version.urls.documentation}">${version.slug}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return versionsHTML;
+  }
+
+  function renderDownloads(config) {
+    if (!Object.keys(config.versions.current.downloads).length) {
+      return "";
+    }
+    const downloadsNameDisplay = {
+      pdf: "PDF",
+      epub: "Epub",
+      htmlzip: "HTML",
+    };
+
+    const downloadsHTML = `
+      <dl>
+        <dt>Downloads</dt>
+        ${Object.entries(config.versions.current.downloads)
+          .map(
+            ([name, url]) => `
+        <dd>
+          <a href="${url}">${downloadsNameDisplay[name]}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return downloadsHTML;
+  }
+
+  document.addEventListener("readthedocs-addons-data-ready", function (event) {
+    const config = event.detail.data();
+
+    const flyout = `
+      <div class="rst-versions" data-toggle="rst-versions" role="note">
+        <span class="rst-current-version" data-toggle="rst-current-version">
+          <span class="fa fa-book"> Read the Docs</span>
+          v: ${config.versions.current.slug}
+          <span class="fa fa-caret-down"></span>
+        </span>
+        <div class="rst-other-versions">
+          <div class="injected">
+            ${renderLanguages(config)}
+            ${renderVersions(config)}
+            ${renderDownloads(config)}
+            <dl>
+              <dt>On Read the Docs</dt>
+              <dd>
+                <a href="${config.projects.current.urls.home}">Project Home</a>
+              </dd>
+              <dd>
+                <a href="${config.projects.current.urls.builds}">Builds</a>
+              </dd>
+              <dd>
+                <a href="${config.projects.current.urls.downloads}">Downloads</a>
+              </dd>
+            </dl>
+            <dl>
+              <dt>Search</dt>
+              <dd>
+                <form id="flyout-search-form">
+                  <input
+                    class="wy-form"
+                    type="text"
+                    name="q"
+                    aria-label="Search docs"
+                    placeholder="Search docs"
+                    />
+                </form>
+              </dd>
+            </dl>
+            <hr />
+            <small>
+              <span>Hosted by <a href="https://about.readthedocs.org/">Read the Docs</a></span>
+            </small>
+          </div>
+        </div>
+      </div>
+    `;
+
+    // Inject the generated flyout into the body HTML element.
+    document.body.insertAdjacentHTML("beforeend", flyout);
+
+    // Trigger the Read the Docs Addons Search modal when clicking on the "Search docs" input from inside the flyout.
+    document
+      .querySelector("#flyout-search-form")
+      .addEventListener("focusin", () => {
+        const event = new CustomEvent("readthedocs-search-show");
+        document.dispatchEvent(event);
+      });
+  })
+}
+
+if (themeLanguageSelector || themeVersionSelector) {
+  function onSelectorSwitch(event) {
+    const option = event.target.selectedIndex;
+    const item = event.target.options[option];
+    window.location.href = item.dataset.url;
+  }
+
+  document.addEventListener("readthedocs-addons-data-ready", function (event) {
+    const config = event.detail.data();
+
+    const versionSwitch = document.querySelector(
+      "div.switch-menus > div.version-switch",
+    );
+    if (themeVersionSelector) {
+      let versions = config.versions.active;
+      if (config.versions.current.hidden || config.versions.current.type === "external") {
+        versions.unshift(config.versions.current);
+      }
+      const versionSelect = `
+        <select>
+          ${versions
+            .map(
+              (version) => `
+          <option
+            value="${version.slug}"
+            ${config.versions.current.slug === version.slug ? 'selected="selected"' : ""}
+            data-url="${version.urls.documentation}">
+            ${version.slug}
+          </option>`,
+            )
+            .join("\n")}
+        </select>
+      `;
+
+      versionSwitch.innerHTML = versionSelect;
+      versionSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+    }
+
+    const languageSwitch = document.querySelector(
+      "div.switch-menus > div.language-switch",
+    );
+
+    if (themeLanguageSelector) {
+      if (config.projects.translations.length) {
+        // Add the current language to the options on the selector
+        let languages = config.projects.translations.concat(
+          config.projects.current,
+        );
+        languages = languages.sort((a, b) =>
+          a.language.name.localeCompare(b.language.name),
+        );
+
+        const languageSelect = `
+          <select>
+            ${languages
+              .map(
+                (language) => `
+            <option
+              value="${language.language.code}"
+              ${config.projects.current.slug === language.slug ? 'selected="selected"' : ""}
+              data-url="${language.urls.documentation}">
+              ${language.language.code}
+            </option>`,
+              )
+              .join("\n")}
+          </select>
+        `;
+
+        languageSwitch.innerHTML = languageSelect;
+        languageSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+      }
+      else {
+        languageSwitch.remove();
+      }
+    }
+  });
+}
+
+document.addEventListener("readthedocs-addons-data-ready", function (event) {
+  // Trigger the Read the Docs Addons Search modal when clicking on "Search docs" input from the topnav.
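+  // (Note: the Read the Docs Addons script listens for this "readthedocs-search-show"
+  // CustomEvent name; dispatching it from any script, as done here and in the flyout
+  // handler above, opens the same search modal.)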
+  document
+    .querySelector("[role='search'] input")
+    .addEventListener("focusin", () => {
+      const event = new CustomEvent("readthedocs-search-show");
+      document.dispatchEvent(event);
+    });
+});
\ No newline at end of file
diff --git a/docs/_build/html/_static/jupyterlite_badge_logo.svg b/docs/_static/jupyterlite_badge_logo.svg
similarity index 100%
rename from docs/_build/html/_static/jupyterlite_badge_logo.svg
rename to docs/_static/jupyterlite_badge_logo.svg
diff --git a/docs/_build/html/_static/language_data.js b/docs/_static/language_data.js
similarity index 100%
rename from docs/_build/html/_static/language_data.js
rename to docs/_static/language_data.js
diff --git a/docs/_build/html/_static/minus.png b/docs/_static/minus.png
similarity index 100%
rename from docs/_build/html/_static/minus.png
rename to docs/_static/minus.png
diff --git a/docs/_build/html/_static/no_image.png b/docs/_static/no_image.png
similarity index 100%
rename from docs/_build/html/_static/no_image.png
rename to docs/_static/no_image.png
diff --git a/docs/_build/html/_static/plus.png b/docs/_static/plus.png
similarity index 100%
rename from docs/_build/html/_static/plus.png
rename to docs/_static/plus.png
diff --git a/docs/_build/html/_static/pygments.css b/docs/_static/pygments.css
similarity index 100%
rename from docs/_build/html/_static/pygments.css
rename to docs/_static/pygments.css
diff --git a/docs/_build/html/_static/searchtools.js b/docs/_static/searchtools.js
similarity index 100%
rename from docs/_build/html/_static/searchtools.js
rename to docs/_static/searchtools.js
diff --git a/docs/_build/html/_static/sg_gallery-binder.css b/docs/_static/sg_gallery-binder.css
similarity index 100%
rename from docs/_build/html/_static/sg_gallery-binder.css
rename to docs/_static/sg_gallery-binder.css
diff --git a/docs/_build/html/_static/sg_gallery-dataframe.css b/docs/_static/sg_gallery-dataframe.css
similarity index 100%
rename from docs/_build/html/_static/sg_gallery-dataframe.css
rename to docs/_static/sg_gallery-dataframe.css
diff --git a/docs/_build/html/_static/sg_gallery-rendered-html.css b/docs/_static/sg_gallery-rendered-html.css
similarity index 100%
rename from docs/_build/html/_static/sg_gallery-rendered-html.css
rename to docs/_static/sg_gallery-rendered-html.css
diff --git a/docs/_build/html/_static/sg_gallery.css b/docs/_static/sg_gallery.css
similarity index 100%
rename from docs/_build/html/_static/sg_gallery.css
rename to docs/_static/sg_gallery.css
diff --git a/docs/_build/html/_static/sphinx_highlight.js b/docs/_static/sphinx_highlight.js
similarity index 100%
rename from docs/_build/html/_static/sphinx_highlight.js
rename to docs/_static/sphinx_highlight.js
diff --git a/docs/_build/html/genindex.html b/docs/genindex.html
similarity index 96%
rename from docs/_build/html/genindex.html
rename to docs/genindex.html
index 3bee2fff2..4703aba91 100644
--- a/docs/_build/html/genindex.html
+++ b/docs/genindex.html
[genindex.html hunks: the generated HTML markup was lost in extraction. Recoverable changes: the page title is updated from "Index — coremltools API Reference 8.0b1 documentation" to "Index — coremltools API Reference 8.1 documentation"; the hard-coded "8.0b1" version label is removed from the sidebar; and new index entries are added for change_input_output_tensor_type() (in module coremltools.models.utils), from_memory() and from_path() (coremltools.models.model.MLModelAsset class methods), get_all_compute_devices() (coremltools.models.compute_device.MLComputeDevice class method), and get_available_compute_devices() (coremltools.models.model.MLModel class method). Surrounding index entries (cast_optimization, ceil, clamped_relu, gelu, from_yaml, and module listings) appear only as unchanged context.]
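The new `genindex.html` entries above correspond to the compute-device inspection APIs added in coremltools 8.x. A minimal sketch of how they fit together, assuming only the class and method names shown in the index entries (the printed device type name is illustrative):

```python
from coremltools.models import MLModel
from coremltools.models.compute_device import MLComputeDevice

# Every compute device Core ML can see on this machine
# (CPU, GPU, and, where present, the Neural Engine).
all_devices = MLComputeDevice.get_all_compute_devices()

# The devices MLModel can use for prediction; a class method,
# per the index entry above.
available_devices = MLModel.get_available_compute_devices()

for device in available_devices:
    print(type(device).__name__)  # e.g. "MLCPUComputeDevice" (illustrative)
```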