diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 00000000..6c223b34 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 3f3b480ce3af5fd6759848fc23a27f9b +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/_autosummary/elm.base.ApiBase.html b/_autosummary/elm.base.ApiBase.html new file mode 100644 index 00000000..2bfa38d4 --- /dev/null +++ b/_autosummary/elm.base.ApiBase.html @@ -0,0 +1,505 @@ + + + + + + + elm.base.ApiBase — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.base.ApiBase

+
+
+class ApiBase(model=None)[source]
+

Bases: ABC

+

Class to parse text from a PDF document.

+
+
Parameters:
+

model (None | str) – Optional specification of OpenAI model to use. Default is +cls.DEFAULT_MODEL

+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

call_api(url, headers, request_json)

Make an asyncronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, temperature])

Have a continuous chat with the LLM including context from previous chat() calls stored as attributes in this class.

clear()

Clear chat history and reduce messages to just the initial model role message.

count_tokens(text, model)

Return the number of tokens in a string.

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do pdf text cleaning.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_ROLE

High level model role

URL

OpenAI API API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

+
+
+DEFAULT_MODEL = 'gpt-3.5-turbo'
+

Default model to do pdf text cleaning.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+MODEL_ROLE = 'You are a research assistant that answers questions.'
+

High level model role

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+clear()[source]
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+async static call_api(url, headers, request_json)[source]
+

Make an asyncronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)[source]
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+chat(query, temperature=0)[source]
+

Have a continuous chat with the LLM including context from previous +chat() calls stored as attributes in this class.

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)[source]
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)[source]
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+classmethod get_embedding(text)[source]
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+
+static count_tokens(text, model)[source]
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.base.ApiQueue.html b/_autosummary/elm.base.ApiQueue.html new file mode 100644 index 00000000..a64588d7 --- /dev/null +++ b/_autosummary/elm.base.ApiQueue.html @@ -0,0 +1,287 @@ + + + + + + + elm.base.ApiQueue — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.base.ApiQueue

+
+
+class ApiQueue(url, headers, request_jsons, ignore_error=None, rate_limit=40000.0, max_retries=10)[source]
+

Bases: object

+

Class to manage the parallel API queue and submission

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
  • max_retries (int) – Number of times to retry an API call with an error response before +raising an error.

  • +
+
+
+

Methods

+ + + + + + + + + + + + +

collect_jobs()

Collect asyncronous API calls and API outputs.

run()

Run all asyncronous API calls.

submit_jobs()

Submit a subset jobs asynchronously and hold jobs in the api_jobs attribute.

+

Attributes

+ + + + + + +

waiting_on

Get a list of async jobs that are being waited on.

+
+
+property waiting_on
+

Get a list of async jobs that are being waited on.

+
+ +
+
+submit_jobs()[source]
+

Submit a subset jobs asynchronously and hold jobs in the api_jobs +attribute. Break when the rate_limit is exceeded.

+
+ +
+
+async collect_jobs()[source]
+

Collect asyncronous API calls and API outputs. Store outputs in the +out attribute.

+
+ +
+
+async run()[source]
+

Run all asyncronous API calls.

+
+
Returns:
+

out (list) – List of API call outputs with same ordering as request_jsons +input.

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.base.html b/_autosummary/elm.base.html new file mode 100644 index 00000000..63e22674 --- /dev/null +++ b/_autosummary/elm.base.html @@ -0,0 +1,193 @@ + + + + + + + elm.base — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.base

+

ELM abstract class for API calls

+

Classes

+ + + + + + + + + +

ApiBase([model])

Class to parse text from a PDF document.

ApiQueue(url, headers, request_jsons[, ...])

Class to manage the parallel API queue and submission

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.chunk.Chunker.html b/_autosummary/elm.chunk.Chunker.html new file mode 100644 index 00000000..77a6f2b3 --- /dev/null +++ b/_autosummary/elm.chunk.Chunker.html @@ -0,0 +1,643 @@ + + + + + + + elm.chunk.Chunker — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.chunk.Chunker

+
+
+class Chunker(text, tag=None, tokens_per_chunk=500, overlap=1, split_on='\n\n')[source]
+

Bases: ApiBase

+

Class to break text up into overlapping chunks

+

NOTE: very large paragraphs that exceed the tokens per chunk will not be +split up and will still be padded with overlap.

+
+
Parameters:
+
    +
  • text (str) – Single body of text to break up. Works well if this is a single +document with empty lines between paragraphs.

  • +
  • tag (None | str) – Optional reference tag to include at the beginning of each text +chunk

  • +
  • tokens_per_chunk (float) – Nominal token count per text chunk. Overlap paragraphs will exceed +this.

  • +
  • overlap (int) – Number of paragraphs to overlap between chunks

  • +
  • split_on (str) – Sub string to split text into paragraphs.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

add_overlap(chunks_input)

Add overlap on either side of a text chunk.

call_api(url, headers, request_json)

Make an asyncronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, temperature])

Have a continuous chat with the LLM including context from previous chat() calls stored as attributes in this class.

chunk_text()

Perform the text chunking operation

clean_paragraphs(text)

Clean up double line breaks to make sure paragraphs can be detected in the text.

clear()

Clear chat history and reduce messages to just the initial model role message.

count_tokens(text, model)

Return the number of tokens in a string.

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

is_good_paragraph(paragraph)

Basic tests to make sure the paragraph is useful text.

merge_chunks(chunks_input)

Merge chunks until they reach the token limit per chunk.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do pdf text cleaning.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_ROLE

High level model role

URL

OpenAI API API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

chunk_tokens

Number of tokens per chunk.

chunks

List of overlapping text chunks (strings).

paragraph_tokens

Number of tokens per paragraph.

paragraphs

Get a list of paragraphs in the text demarkated by an empty line.

+
+
+property chunks
+

List of overlapping text chunks (strings).

+
+
Returns:
+

list

+
+
+
+ +
+
+property paragraphs
+

Get a list of paragraphs in the text demarkated by an empty line.

+
+
Returns:
+

list

+
+
+
+ +
+
+static clean_paragraphs(text)[source]
+

Clean up double line breaks to make sure paragraphs can be detected +in the text.

+
+ +
+
+static is_good_paragraph(paragraph)[source]
+

Basic tests to make sure the paragraph is useful text.

+
+ +
+
+property paragraph_tokens
+

Number of tokens per paragraph.

+
+
Returns:
+

list

+
+
+
+ +
+
+property chunk_tokens
+

Number of tokens per chunk.

+
+
Returns:
+

list

+
+
+
+ +
+
+merge_chunks(chunks_input)[source]
+

Merge chunks until they reach the token limit per chunk.

+
+
Parameters:
+

chunks_input (list) – List of list of integers: [[0, 1], [2], [3, 4]] where nested lists +are chunks and the integers are paragraph indices

+
+
Returns:
+

chunks (list) – List of list of integers: [[0, 1], [2], [3, 4]] where nested lists +are chunks and the integers are paragraph indices

+
+
+
+ +
+
+add_overlap(chunks_input)[source]
+

Add overlap on either side of a text chunk. This ignores token +limit.

+
+
Parameters:
+

chunks_input (list) – List of list of integers: [[0, 1], [2], [3, 4]] where nested lists +are chunks and the integers are paragraph indices

+
+
Returns:
+

chunks (list) – List of list of integers: [[0, 1], [2], [3, 4]] where nested lists +are chunks and the integers are paragraph indices

+
+
+
+ +
+
+chunk_text()[source]
+

Perform the text chunking operation

+
+
Returns:
+

chunks (list) – List of strings where each string is an overlapping chunk of text

+
+
+
+ +
+
+DEFAULT_MODEL = 'gpt-3.5-turbo'
+

Default model to do pdf text cleaning.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+MODEL_ROLE = 'You are a research assistant that answers questions.'
+

High level model role

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API API URL

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+async static call_api(url, headers, request_json)
+

Make an asyncronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+chat(query, temperature=0)
+

Have a continuous chat with the LLM including context from previous +chat() calls stored as attributes in this class.

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+clear()
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+static count_tokens(text, model)
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+classmethod get_embedding(text)
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.chunk.html b/_autosummary/elm.chunk.html new file mode 100644 index 00000000..a4b829f9 --- /dev/null +++ b/_autosummary/elm.chunk.html @@ -0,0 +1,190 @@ + + + + + + + elm.chunk — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.chunk

+

Utility to break text up into overlapping chunks.

+

Classes

+ + + + + + +

Chunker(text[, tag, tokens_per_chunk, ...])

Class to break text up into overlapping chunks

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.embed.ChunkAndEmbed.html b/_autosummary/elm.embed.ChunkAndEmbed.html new file mode 100644 index 00000000..186ef08c --- /dev/null +++ b/_autosummary/elm.embed.ChunkAndEmbed.html @@ -0,0 +1,563 @@ + + + + + + + elm.embed.ChunkAndEmbed — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.embed.ChunkAndEmbed

+
+
+class ChunkAndEmbed(text, model=None, **chunk_kwargs)[source]
+

Bases: ApiBase

+

Class to chunk text data and create embeddings

+
+
Parameters:
+
    +
  • text (str) – Single continuous piece of text to chunk up by paragraph and embed +or filepath to .txt file containing one piece of text.

  • +
  • model (None | str) – Optional specification of OpenAI model to use. Default is +cls.DEFAULT_MODEL

  • +
  • chunk_kwargs (dict | None) – kwargs for initialization of elm.chunk.Chunker

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

call_api(url, headers, request_json)

Make an asyncronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, temperature])

Have a continuous chat with the LLM including context from previous chat() calls stored as attributes in this class.

clean_tables(text)

Make sure that table headers are in the same paragraph as the table itself.

clear()

Clear chat history and reduce messages to just the initial model role message.

count_tokens(text, model)

Return the number of tokens in a string.

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

run([rate_limit])

Run text embedding in serial

run_async([rate_limit])

Run text embedding on chunks asynchronously

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do embeddings.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_ROLE

High level model role

URL

OpenAI API API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

+
+
+DEFAULT_MODEL = 'text-embedding-ada-002'
+

Default model to do embeddings.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+MODEL_ROLE = 'You are a research assistant that answers questions.'
+

High level model role

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API API URL

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+async static call_api(url, headers, request_json)
+

Make an asyncronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+chat(query, temperature=0)
+

Have a continuous chat with the LLM including context from previous +chat() calls stored as attributes in this class.

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+static clean_tables(text)[source]
+

Make sure that table headers are in the same paragraph as the table +itself. Typically, tables are looked for with pipes and hyphens, which +is how GPT cleans tables in text.

+
+ +
+
+clear()
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+static count_tokens(text, model)
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+classmethod get_embedding(text)
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+
+run(rate_limit=175000.0)[source]
+

Run text embedding in serial

+
+
Parameters:
+

rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +embedding limit is 350k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

+
+
Returns:
+

embedding (list) – List of 1D arrays representing the embeddings for all text chunks

+
+
+
+ +
+
+async run_async(rate_limit=175000.0)[source]
+

Run text embedding on chunks asynchronously

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await ChunkAndEmbed.run_async()

+
+
Parameters:
+

rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +embedding limit is 350k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

+
+
Returns:
+

embedding (list) – List of 1D arrays representing the embeddings for all text chunks

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.embed.html b/_autosummary/elm.embed.html new file mode 100644 index 00000000..fd1927d1 --- /dev/null +++ b/_autosummary/elm.embed.html @@ -0,0 +1,190 @@ + + + + + + + elm.embed — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.embed

+

ELM text embedding

+

Classes

+ + + + + + +

ChunkAndEmbed(text[, model])

Class to chunk text data and create embeddings

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.html b/_autosummary/elm.html new file mode 100644 index 00000000..048b9dae --- /dev/null +++ b/_autosummary/elm.html @@ -0,0 +1,212 @@ + + + + + + + elm — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm

+

Energy Language Model

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

elm.base

ELM abstract class for API calls

elm.chunk

Utility to break text up into overlapping chunks.

elm.embed

ELM text embedding

elm.osti

Utilities for retrieving data from OSTI.

elm.pdf

ELM PDF to text parser

elm.summary

Research Summarization and Distillation with LLMs

elm.tree

ELM decision trees.

elm.version

ELM version number

elm.wizard

ELM energy wizard

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.osti.OstiList.html b/_autosummary/elm.osti.OstiList.html new file mode 100644 index 00000000..5dff293a --- /dev/null +++ b/_autosummary/elm.osti.OstiList.html @@ -0,0 +1,390 @@ + + + + + + + elm.osti.OstiList — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.osti.OstiList

+
+
+class OstiList(url, n_pages=1)[source]
+

Bases: list

+

Class to retrieve and handle multiple OSTI records from an API URL.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OSTI API URL to request, see this for details:

    https://www.osti.gov/api/v1/docs

    +
    +
    +
  • +
  • n_pages (int) – Number of pages to get from the API. Typical response has 20 +entries per page. Default of 1 ensures that this class doesnt hang +on a million responses.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

append(object, /)

Append object to the end of the list.

clear()

Remove all items from list.

copy()

Return a shallow copy of the list.

count(value, /)

Return number of occurrences of value.

download(out_dir)

Download all PDFs from the records in this OSTI object into a directory.

extend(iterable, /)

Extend list by appending elements from the iterable.

from_osti_ids(oids)

Initialize OSTI records from one or more numerical IDS

index(value[, start, stop])

Return first index of value.

insert(index, object, /)

Insert object before index.

pop([index])

Remove and return item at index (default last).

remove(value, /)

Remove first occurrence of value.

reverse()

Reverse IN PLACE.

sort(*[, key, reverse])

Sort the list in ascending order and return None.

+

Attributes

+ + + + + + + + + +

BASE_URL

Base OSTI API URL.

meta

Get a meta dataframe with details on all of the OSTI records.

+
+
+BASE_URL = 'https://www.osti.gov/api/v1/records'
+

Base OSTI API URL. This can be appended with search parameters

+
+ +
+
+download(out_dir)[source]
+

Download all PDFs from the records in this OSTI object into a +directory. PDFs will be given file names based on their OSTI record ID

+
+
Parameters:
+

out_dir (str) – Directory to download PDFs to. This directory will be created if it +does not already exist.

+
+
+
+ +
+
+property meta
+

Get a meta dataframe with details on all of the OSTI records.

+
+
Returns:
+

pd.DataFrame

+
+
+
+ +
+
+__add__(value, /)
+

Return self+value.

+
+ +
+
+__mul__(value, /)
+

Return self*value.

+
+ +
+
+append(object, /)
+

Append object to the end of the list.

+
+ +
+
+clear()
+

Remove all items from list.

+
+ +
+
+copy()
+

Return a shallow copy of the list.

+
+ +
+
+count(value, /)
+

Return number of occurrences of value.

+
+ +
+
+extend(iterable, /)
+

Extend list by appending elements from the iterable.

+
+ +
+
+classmethod from_osti_ids(oids)[source]
+

Initialize OSTI records from one or more numerical IDS

+
+
Parameters:
+

oids (list) – List of string or integer OSTI IDs which are typically 7 digit +numbers

+
+
Returns:
+

out (OstiList) – OstiList object with entries for each oid input.

+
+
+
+ +
+
+index(value, start=0, stop=9223372036854775807, /)
+

Return first index of value.

+

Raises ValueError if the value is not present.

+
+ +
+
+insert(index, object, /)
+

Insert object before index.

+
+ +
+
+pop(index=-1, /)
+

Remove and return item at index (default last).

+

Raises IndexError if list is empty or index is out of range.

+
+ +
+
+remove(value, /)
+

Remove first occurrence of value.

+

Raises ValueError if the value is not present.

+
+ +
+
+reverse()
+

Reverse IN PLACE.

+
+ +
+
+sort(*, key=None, reverse=False)
+

Sort the list in ascending order and return None.

+

The sort is in-place (i.e. the list itself is modified) and stable (i.e. the +order of two equal elements is maintained).

+

If a key function is given, apply it once to each list item and sort them, +ascending or descending, according to their function values.

+

The reverse flag can be set to sort in descending order.

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.osti.OstiRecord.html b/_autosummary/elm.osti.OstiRecord.html new file mode 100644 index 00000000..34301a94 --- /dev/null +++ b/_autosummary/elm.osti.OstiRecord.html @@ -0,0 +1,424 @@ + + + + + + + elm.osti.OstiRecord — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.osti.OstiRecord

+
+
+class OstiRecord(record)[source]
+

Bases: dict

+

Class to handle a single OSTI record as dictionary data

+
+
Parameters:
+

record (dict) – OSTI record in dict form, typically a response from OSTI API.

+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

clear()

copy()

download(fp)

Download the PDF of this record

fromkeys([value])

Create a new dictionary with keys from iterable and values set to value.

get(key[, default])

Return the value for key if key is in the dictionary, else default.

items()

keys()

pop(k[,d])

If key is not found, default is returned if given, otherwise KeyError is raised

popitem()

Remove and return a (key, value) pair as a 2-tuple.

setdefault(key[, default])

Insert key with a value of default if key is not in the dictionary.

strip_nested_brackets(text)

Remove text between brackets/parentheses for cleaning OSTI text

update([E, ]**F)

If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v In either case, this is followed by: for k in F: D[k] = F[k]

values()

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + +

authors

Get the list of authors of this record.

date

Get the date of publication of this record

doi

Get the DOI of this record

osti_id

Get the OSTI ID of this record which is typically a 7 digit number

title

Get the title of this record

url

Get the download URL of this record

year

Get the year of publication of this record

+
+
+static strip_nested_brackets(text)[source]
+

Remove text between brackets/parentheses for cleaning OSTI text

+
+ +
+
+property authors
+

Get the list of authors of this record.

+
+
Returns:
+

str

+
+
+
+ +
+
+property title
+

Get the title of this record

+
+
Returns:
+

str | None

+
+
+
+ +
+
+property year
+

Get the year of publication of this record

+
+
Returns:
+

str | None

+
+
+
+ +
+
+property date
+

Get the date of publication of this record

+
+
Returns:
+

str | None

+
+
+
+ +
+
+property doi
+

Get the DOI of this record

+
+
Returns:
+

str | None

+
+
+
+ +
+
+property osti_id
+

Get the OSTI ID of this record which is typically a 7 digit number

+
+
Returns:
+

str | None

+
+
+
+ +
+
+property url
+

Get the download URL of this record

+
+
Returns:
+

str | None

+
+
+
+ +
+
+download(fp)[source]
+

Download the PDF of this record

+
+
Parameters:
+

fp (str) – Filepath to download this record to, typically a .pdf

+
+
+
+ +
+
+clear() None.  Remove all items from D.
+
+ +
+
+copy() a shallow copy of D
+
+ +
+
+fromkeys(value=None, /)
+

Create a new dictionary with keys from iterable and values set to value.

+
+ +
+
+get(key, default=None, /)
+

Return the value for key if key is in the dictionary, else default.

+
+ +
+
+items() a set-like object providing a view on D's items
+
+ +
+
+keys() a set-like object providing a view on D's keys
+
+ +
+
+pop(k[, d]) v, remove specified key and return the corresponding value.
+

If key is not found, default is returned if given, otherwise KeyError is raised

+
+ +
+
+popitem()
+

Remove and return a (key, value) pair as a 2-tuple.

+

Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty.

+
+ +
+
+setdefault(key, default=None, /)
+

Insert key with a value of default if key is not in the dictionary.

+

Return the value for key if key is in the dictionary, else default.

+
+ +
+
+update([E, ]**F) None.  Update D from dict/iterable E and F.
+

If E is present and has a .keys() method, then does: for k in E: D[k] = E[k] +If E is present and lacks a .keys() method, then does: for k, v in E: D[k] = v +In either case, this is followed by: for k in F: D[k] = F[k]

+
+ +
+
+values() an object providing a view on D's values
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.osti.html b/_autosummary/elm.osti.html new file mode 100644 index 00000000..b6d43b52 --- /dev/null +++ b/_autosummary/elm.osti.html @@ -0,0 +1,193 @@ + + + + + + + elm.osti — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.osti

+

Utilities for retrieving data from OSTI.

+

Classes

+ + + + + + + + + +

OstiList(url[, n_pages])

Class to retrieve and handle multiple OSTI records from an API URL.

OstiRecord(record)

Class to handle a single OSTI record as dictionary data

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.pdf.PDFtoTXT.html b/_autosummary/elm.pdf.PDFtoTXT.html new file mode 100644 index 00000000..f84b5e92 --- /dev/null +++ b/_autosummary/elm.pdf.PDFtoTXT.html @@ -0,0 +1,700 @@ + + + + + + + elm.pdf.PDFtoTXT — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.pdf.PDFtoTXT

+
+
+class PDFtoTXT(fp, page_range=None, model=None)[source]
+

Bases: ApiBase

+

Class to parse text from a PDF document.

+
+
Parameters:
+
    +
  • fp (str) – Filepath to .pdf file to extract.

  • +
  • page_range (None | list) – Optional 2-entry list/tuple to set starting and ending pages +(python indexing)

  • +
  • model (None | str) – Optional specification of OpenAI model to use. Default is +cls.DEFAULT_MODEL

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

call_api(url, headers, request_json)

Make an asyncronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, temperature])

Have a continuous chat with the LLM including context from previous chat() calls stored as attributes in this class.

clean_headers([char_thresh, page_thresh, ...])

Clean headers/footers that are duplicated across pages

clean_poppler([layout])

Clean the pdf using the poppler pdftotxt utility

clean_txt()

Use GPT to clean raw pdf text in serial calls to the OpenAI API.

clean_txt_async([ignore_error, rate_limit])

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

clear()

Clear chat history and reduce messages to just the initial model role message.

combine_pages(pages)

Combine pages of GPT cleaned text into a single string.

count_tokens(text, model)

Return the number of tokens in a string.

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

is_double_col([separator])

Does the text look like it has multiple vertical text columns?

load_pdf(page_range)

Basic load of pdf to text strings

make_gpt_messages(pdf_raw_text)

Make the chat completion messages list for input to GPT

validate_clean()

Run some basic checks on the GPT cleaned text vs.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do pdf text cleaning.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_INSTRUCTION

Instructions to the model with python format braces for pdf text

MODEL_ROLE

High level model role.

URL

OpenAI API API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

+
+
+MODEL_ROLE = 'You clean up poorly formatted text extracted from PDF documents.'
+

High level model role.

+
+ +
+
+MODEL_INSTRUCTION = 'Text extracted from a PDF: \n"""\n{}\n"""\n\nThe text above was extracted from a PDF document. Can you make it nicely formatted? Please only return the formatted text without comments or added information.'
+

Instructions to the model with python format braces for pdf text

+
+ +
+
+load_pdf(page_range)[source]
+

Basic load of pdf to text strings

+
+
Parameters:
+

page_range (None | list) – Optional 2-entry list/tuple to set starting and ending pages +(python indexing)

+
+
Returns:
+

out (list) – List of strings where each entry is a page. This is the raw PDF +text before GPT cleaning

+
+
+
+ +
+
+make_gpt_messages(pdf_raw_text)[source]
+

Make the chat completion messages list for input to GPT

+
+
Parameters:
+

pdf_raw_text (str) – Raw PDF text to be cleaned

+
+
Returns:
+

messages (list) – Messages for OpenAI chat completion model. Typically this looks +like this:

+
+
+
[{“role”: “system”, “content”: “You do this…”},

{“role”: “user”, “content”: “Please do this: {}”}]

+
+
+
+

+
+
+
+ +
+
+clean_txt()[source]
+

Use GPT to clean raw pdf text in serial calls to the OpenAI API.

+
+
Returns:
+

clean_pages (list) – List of clean text strings where each list entry is a page from the +PDF

+
+
+
+ +
+
+async clean_txt_async(ignore_error=None, rate_limit=40000.0)[source]
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

clean_pages (list) – List of clean text strings where each list entry is a page from the +PDF

+
+
+
+ +
+
+is_double_col(separator='    ')[source]
+

Does the text look like it has multiple vertical text columns?

+
+
Parameters:
+

separator (str) – Heuristic split string to look for spaces between columns

+
+
Returns:
+

out (bool) – True if more than one vertical text column

+
+
+
+ +
+
+clean_poppler(layout=True)[source]
+

Clean the pdf using the poppler pdftotxt utility

+
+
Requires the pdftotext command line utility from this software:

https://poppler.freedesktop.org/

+
+
+
+
Parameters:
+

layout (bool) – Layout flag for poppler pdftotxt utility: “maintain original +physical layout”. Layout=True works well for single column text, +layout=False collapses the double columns into single columns which +works better for downstream chunking and LLM work.

+
+
Returns:
+

out (str) – Joined cleaned pages

+
+
+
+ +
+
+validate_clean()[source]
+

Run some basic checks on the GPT cleaned text vs. the raw text

+
+ +
+
+static combine_pages(pages)[source]
+

Combine pages of GPT cleaned text into a single string.

+
+
Parameters:
+

pages (list) – List of clean text strings where each list entry is a page from the +PDF

+
+
Returns:
+

full (str) – Single multi-page string

+
+
+
+ +
+
+DEFAULT_MODEL = 'gpt-3.5-turbo'
+

Default model to do pdf text cleaning.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API API URL

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+async static call_api(url, headers, request_json)
+

Make an asyncronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+chat(query, temperature=0)
+

Have a continuous chat with the LLM including context from previous +chat() calls stored as attributes in this class.

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+clear()
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+static count_tokens(text, model)
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+classmethod get_embedding(text)
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+
+clean_headers(char_thresh=0.6, page_thresh=0.8, split_on='\n', iheaders=(0, 1, -2, -1))[source]
+

Clean headers/footers that are duplicated across pages

+
+
Parameters:
+
    +
  • char_thresh (float) – Fraction of characters in a given header that are similar between +pages to be considered for removal

  • +
  • page_thresh (float) – Fraction of pages that share the header to be considered for +removal

  • +
  • split_on (str) – Chars to split lines of a page on

  • +
  • iheaders (list | tuple) – Integer indices to look for headers after splitting a page into +lines based on split_on. This needs to go from the start of the +page to the end.

  • +
+
+
Returns:
+

out (str) – Clean text with all pages joined

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.pdf.html b/_autosummary/elm.pdf.html new file mode 100644 index 00000000..65d6baa6 --- /dev/null +++ b/_autosummary/elm.pdf.html @@ -0,0 +1,190 @@ + + + + + + + elm.pdf — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.pdf

+

ELM PDF to text parser

+

Classes

+ + + + + + +

PDFtoTXT(fp[, page_range, model])

Class to parse text from a PDF document.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.summary.Summary.html b/_autosummary/elm.summary.Summary.html new file mode 100644 index 00000000..6cc0b9a6 --- /dev/null +++ b/_autosummary/elm.summary.Summary.html @@ -0,0 +1,600 @@ + + + + + + + elm.summary.Summary — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.summary.Summary

+
+
+class Summary(text, model=None, n_words=500, **chunk_kwargs)[source]
+

Bases: ApiBase

+

Interface to perform Recursive Summarization and Distillation of +research text

+
+
Parameters:
+
    +
  • text (str | list) – Single body of text to chunk up using elm.Chunker or a pre-chunked +list of strings. Works well if this is a single document with empty +lines between paragraphs.

  • +
  • model (str) – GPT model name, default is the DEFAULT_MODEL global var

  • +
  • n_words (int) – Desired length of the output text. Note that this is never perfect +but helps guide the LLM to an approximate desired output length. +400-600 words seems to work quite well with GPT-4. This gets +formatted into the MODEL_INSTRUCTION attribute.

  • +
  • chunk_kwargs (dict | None) – kwargs for initialization of elm.chunk.Chunker

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

call_api(url, headers, request_json)

Make an asyncronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, temperature])

Have a continuous chat with the LLM including context from previous chat() calls stored as attributes in this class.

clear()

Clear chat history and reduce messages to just the initial model role message.

combine(text_summary)

Combine separate chunk summaries into one more comprehensive narrative

count_tokens(text, model)

Return the number of tokens in a string.

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

run([temperature, fancy_combine])

Use GPT to do a summary of input text.

run_async([temperature, ignore_error, ...])

Run text summary asynchronously for all text chunks

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do pdf text cleaning.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_INSTRUCTION

Prefix to the engineered prompt.

MODEL_ROLE

High level model role, somewhat redundant to MODEL_INSTRUCTION

URL

OpenAI API API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

+
+
+MODEL_ROLE = 'You are an energy scientist summarizing prior research'
+

High level model role, somewhat redundant to MODEL_INSTRUCTION

+
+ +
+
+MODEL_INSTRUCTION = 'Can you please summarize the text quoted above in {n_words} words?\n\n"""\n{text_chunk}\n"""'
+

Prefix to the engineered prompt. The format args text_chunk and +n_words will be formatted by the Summary class at runtime. text_chunk +will be provided by the Summary text chunks, n_words is an initialization +argument for the Summary class.

+
+ +
+
+DEFAULT_MODEL = 'gpt-3.5-turbo'
+

Default model to do pdf text cleaning.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API API URL

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+async static call_api(url, headers, request_json)
+

Make an asyncronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+chat(query, temperature=0)
+

Have a continuous chat with the LLM including context from previous +chat() calls stored as attributes in this class.

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+clear()
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+static count_tokens(text, model)
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+classmethod get_embedding(text)
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+
+combine(text_summary)[source]
+

Combine separate chunk summaries into one more comprehensive +narrative

+
+
Parameters:
+

summary (str) – Summary of text. May be several disjointed paragraphs

+
+
Returns:
+

summary (str) – Summary of text. Paragraphs will be more cohesive.

+
+
+
+ +
+
+run(temperature=0, fancy_combine=True)[source]
+

Use GPT to do a summary of input text.

+
+
Parameters:
+
    +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • fancy_combine (bool) – Flag to use the GPT model to combine the separate outputs into a +cohesive summary.

  • +
+
+
Returns:
+

summary (str) – Summary of text.

+
+
+
+ +
+
+async run_async(temperature=0, ignore_error=None, rate_limit=40000.0, fancy_combine=True)[source]
+

Run text summary asynchronously for all text chunks

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
  • fancy_combine (bool) – Flag to use the GPT model to combine the separate outputs into a +cohesive summary.

  • +
+
+
Returns:
+

summary (str) – Summary of text.

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.summary.html b/_autosummary/elm.summary.html new file mode 100644 index 00000000..903b9292 --- /dev/null +++ b/_autosummary/elm.summary.html @@ -0,0 +1,190 @@ + + + + + + + elm.summary — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.summary

+

Research Summarization and Distillation with LLMs

+

Classes

+ + + + + + +

Summary(text[, model, n_words])

Interface to perform Recursive Summarization and Distillation of research text

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.tree.DecisionTree.html b/_autosummary/elm.tree.DecisionTree.html new file mode 100644 index 00000000..09f7d587 --- /dev/null +++ b/_autosummary/elm.tree.DecisionTree.html @@ -0,0 +1,346 @@ + + + + + + + elm.tree.DecisionTree — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.tree.DecisionTree

+
+
+class DecisionTree(graph)[source]
+

Bases: object

+

Class to traverse a directed graph of LLM prompts. Nodes are +prompts and edges are transitions between prompts based on conditions +being met in the LLM response.

+

Class to traverse a directed graph of LLM prompts. Nodes are +prompts and edges are transitions between prompts based on conditions +being met in the LLM response.

+

Examples

+

Here’s a simple example to setup a decision tree graph and run with the +DecisionTree class:

+
>>> import logging
+>>> import networkx as nx
+>>> from rex import init_logger
+>>> from elm.base import ApiBase
+>>> from elm.tree import DecisionTree
+>>>
+>>> init_logger('elm.tree')
+>>>
+>>> G = nx.DiGraph(text='hello', name='Grant',
+                   api=ApiBase(model='gpt-35-turbo'))
+>>>
+>>> G.add_node('init', prompt='Say {text} to {name}')
+>>> G.add_edge('init', 'next', condition=lambda x: 'Grant' in x)
+>>> G.add_node('next', prompt='How are you?')
+>>>
+>>> tree = DecisionTree(G)
+>>> out = tree.run()
+>>>
+>>> print(tree.all_messages_txt)
+
+
+
+
Parameters:
+

graph (nx.DiGraph) – Directed acyclic graph where nodes are LLM prompts and edges are +logical transitions based on the response. Must have high-level +graph attribute “api” which is an ApiBase instance. Nodes should +have attribute “prompt” which can have {format} named arguments +that will be filled from the high-level graph attributes. Edges can +have attribute “condition” that is a callable to be executed on the +LLM response text. An edge from a node without a condition acts as +an “else” statement if no other edge conditions are satisfied. A +single edge from node to node does not need a condition.

+
+
+

Methods

+ + + + + + + + + +

call_node(node0)

Call the LLM with the prompt from the input node and search the successor edges for a valid transition condition

run([node0])

Traverse the decision tree starting at the input node.

+

Attributes

+ + + + + + + + + + + + + + + + + + +

all_messages_txt

Get a printout of the full conversation with the LLM

api

Get the ApiBase object.

graph

Get the networkx graph object

history

Get a record of the nodes traversed in the tree

messages

Get a list of the conversation messages with the LLM.

+
+
+property api
+

Get the ApiBase object.

+
+
Returns:
+

ApiBase

+
+
+
+ +
+
+property messages
+

Get a list of the conversation messages with the LLM.

+
+
Returns:
+

list

+
+
+
+ +
+
+property all_messages_txt
+

Get a printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+property history
+

Get a record of the nodes traversed in the tree

+
+
Returns:
+

list

+
+
+
+ +
+
+property graph
+

Get the networkx graph object

+
+
Returns:
+

nx.DiGraph

+
+
+
+ +
+
+call_node(node0)[source]
+

Call the LLM with the prompt from the input node and search the +successor edges for a valid transition condition

+
+
Parameters:
+

node0 (str) – Name of node being executed.

+
+
Returns:
+

out (str) – Next node or LLM response if at a leaf node.

+
+
+
+ +
+
+run(node0='init')[source]
+

Traverse the decision tree starting at the input node.

+
+
Parameters:
+

node0 (str) – Name of starting node in the graph. This is typically called “init”

+
+
Returns:
+

out (str) – Final response from LLM at the leaf node.

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.tree.html b/_autosummary/elm.tree.html new file mode 100644 index 00000000..2c69f722 --- /dev/null +++ b/_autosummary/elm.tree.html @@ -0,0 +1,190 @@ + + + + + + + elm.tree — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.tree

+

ELM decision trees.

+

Classes

+ + + + + + +

DecisionTree(graph)

Class to traverse a directed graph of LLM prompts.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.version.html b/_autosummary/elm.version.html new file mode 100644 index 00000000..9cbdf0b3 --- /dev/null +++ b/_autosummary/elm.version.html @@ -0,0 +1,182 @@ + + + + + + + elm.version — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.version

+

ELM version number

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.wizard.EnergyWizard.html b/_autosummary/elm.wizard.EnergyWizard.html new file mode 100644 index 00000000..cd9cdad4 --- /dev/null +++ b/_autosummary/elm.wizard.EnergyWizard.html @@ -0,0 +1,657 @@ + + + + + + + elm.wizard.EnergyWizard — elm 0.0.1 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.wizard.EnergyWizard

+
+
+class EnergyWizard(corpus, model=None, token_budget=3500, ref_col=None)[source]
+

Bases: ApiBase

+

Interface to ask OpenAI LLMs about energy research.

+
+
Parameters:
+
    +
  • corpus (pd.DataFrame) – Corpus of text in dataframe format. Must have columns “text” and +“embedding”.

  • +
  • model (str) – GPT model name, default is the DEFAULT_MODEL global var

  • +
  • token_budget (int) – Number of tokens that can be embedded in the prompt. Note that the +default budget for GPT-3.5-Turbo is 4096, but you want to subtract +some tokens to account for the response budget.

  • +
  • ref_col (None | str) – Optional column label in the corpus that provides a reference text +string for each chunk of text.

  • +
+
+
+

Methods

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

call_api(url, headers, request_json)

Make an asynchronous OpenAI API call.

call_api_async(url, headers, all_request_jsons)

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

chat(query[, debug, stream, temperature, ...])

Answers a query by doing a semantic search of relevant text with embeddings and then sending engineered query to the LLM.

clear()

Clear chat history and reduce messages to just the initial model role message.

cosine_dist(query_embedding)

Compute the cosine distance of the query embedding array vs.

count_tokens(text, model)

Return the number of tokens in a string.

engineer_query(query[, token_budget, ...])

Engineer a query for GPT using the corpus of information

generic_async_query(queries[, model_role, ...])

Run a number of generic single queries asynchronously (not conversational)

generic_query(query[, model_role, temperature])

Ask a generic single query without conversation

get_embedding(text)

Get the 1D array (list) embedding of a text string.

make_ref_list(idx)

Make a reference list

preflight_corpus(corpus[, required])

Run preflight checks on the text corpus.

rank_strings(query[, top_n])

Returns a list of strings and relatednesses, sorted from most related to least.

+

Attributes

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +

DEFAULT_MODEL

Default model to do pdf text cleaning.

EMBEDDING_MODEL

Default model to do text embeddings.

EMBEDDING_URL

OpenAI embedding API URL

HEADERS

OpenAI API Headers

MODEL_INSTRUCTION

Prefix to the engineered prompt

MODEL_ROLE

High level model role, somewhat redundant to MODEL_INSTRUCTION

URL

OpenAI API URL

all_messages_txt

Get a string printout of the full conversation with the LLM

+
+
+MODEL_ROLE = 'You parse through articles to answer questions.'
+

High level model role, somewhat redundant to MODEL_INSTRUCTION

+
+ +
+
+MODEL_INSTRUCTION = 'Use the information below to answer the subsequent question. If the answer cannot be found in the text, write "I could not find an answer."'
+

Prefix to the engineered prompt

+
+ +
+
+static preflight_corpus(corpus, required=('text', 'embedding'))[source]
+

Run preflight checks on the text corpus.

+
+
Parameters:
+
    +
  • corpus (pd.DataFrame) – Corpus of text in dataframe format. Must have columns “text” and +“embedding”.

  • +
  • required (list | tuple) – Column names required to be in the corpus df

  • +
+
+
Returns:
+

corpus (pd.DataFrame) – Corpus of text in dataframe format. Must have columns “text” and +“embedding”.

+
+
+
+ +
+
+cosine_dist(query_embedding)[source]
+

Compute the cosine distance of the query embedding array vs. all of +the embedding arrays of the full text corpus

+
+
Parameters:
+

query_embedding (np.ndarray) – 1D array of the numerical embedding of the request query.

+
+
Returns:
+

out (np.ndarray) – 1D array with length equal to the number of entries in the text +corpus. Each value is a distance score where smaller is closer

+
+
+
+ +
+
+rank_strings(query, top_n=100)[source]
+

Returns a list of strings and relatednesses, sorted from most +related to least.

+
+
Parameters:
+
    +
  • query (str) – Question being asked of GPT

  • +
  • top_n (int) – Number of top results to return.

  • +
+
+
Returns:
+

    +
  • strings (np.ndarray) – 1D array of related strings

  • +
  • score (np.ndarray) – 1D array of float scores of strings

  • +
  • idx (np.ndarray) – 1D array of indices in the text corpus corresponding to the +ranked strings/scores outputs.

  • +
+

+
+
+
+ +
+
+engineer_query(query, token_budget=None, new_info_threshold=0.7, convo=False)[source]
+

Engineer a query for GPT using the corpus of information

+
+
Parameters:
+
    +
  • query (str) – Question being asked of GPT

  • +
  • token_budget (int) – Option to override the class init token budget.

  • +
  • new_info_threshold (float) – New text added to the engineered query must contain at least this +much new information. This helps prevent (for example) the table of +contents being added multiple times.

  • +
  • convo (bool) – Flag to perform semantic search with full conversation history +(True) or just the single query (False). Call EnergyWizard.clear() +to reset the chat history.

  • +
+
+
Returns:
+

    +
  • message (str) – Engineered question to GPT including information from corpus and +the original query

  • +
  • references (list) – The list of references (strs) used in the engineered prompt is +returned here

  • +
+

+
+
+
+ +
+
+DEFAULT_MODEL = 'gpt-3.5-turbo'
+

Default model to do pdf text cleaning.

+
+ +
+
+EMBEDDING_MODEL = 'text-embedding-ada-002'
+

Default model to do text embeddings.

+
+ +
+
+EMBEDDING_URL = 'https://api.openai.com/v1/embeddings'
+

OpenAI embedding API URL

+
+ +
+
+HEADERS = {'Authorization': 'Bearer None', 'Content-Type': 'application/json', 'api-key': 'None'}
+

OpenAI API Headers

+
+ +
+
+URL = 'https://api.openai.com/v1/chat/completions'
+

OpenAI API URL

+
+ +
+
+property all_messages_txt
+

Get a string printout of the full conversation with the LLM

+
+
Returns:
+

str

+
+
+
+ +
+
+async static call_api(url, headers, request_json)
+

Make an asynchronous OpenAI API call.

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • request_json (dict) –

    +
    +
    API data input, typically looks like this for chat completion:
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
    +
  • +
+
+
Returns:
+

out (dict) – API response in json format

+
+
+
+ +
+
+async call_api_async(url, headers, all_request_jsons, ignore_error=None, rate_limit=40000.0)
+

Use GPT to clean raw pdf text in parallel calls to the OpenAI API.

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await PDFtoTXT.clean_txt_async()

+
+
Parameters:
+
    +
  • url (str) –

    +
    +
    OpenAI API url, typically either:

    https://api.openai.com/v1/embeddings +https://api.openai.com/v1/chat/completions

    +
    +
    +
  • +
  • headers (dict) –

    +
    +
    OpenAI API headers, typically:
    +
    {“Content-Type”: “application/json”,

    “Authorization”: f”Bearer {openai.api_key}”}

    +
    +
    +
    +
    +
  • +
  • all_request_jsons (list) – List of API data input, one entry typically looks like this for +chat completion:

    +
    +
    +
    {“model”: “gpt-3.5-turbo”,
    +
    “messages”: [{“role”: “system”, “content”: “You do this…”},

    {“role”: “user”, “content”: “Do this: {}”}],

    +
    +
    +

    “temperature”: 0.0}

    +
    +
    +
    +
  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

out (list) – List of API outputs where each list entry is a GPT answer from the +corresponding message in the all_request_jsons input.

+
+
+
+ +
+
+clear()
+

Clear chat history and reduce messages to just the initial model +role message.

+
+ +
+
+static count_tokens(text, model)
+

Return the number of tokens in a string.

+
+
Parameters:
+
    +
  • text (str) – Text string to get number of tokens for

  • +
  • model (str) – specification of OpenAI model to use (e.g., “gpt-3.5-turbo”)

  • +
+
+
Returns:
+

n (int) – Number of tokens in text

+
+
+
+ +
+
+async generic_async_query(queries, model_role=None, temperature=0, ignore_error=None, rate_limit=40000.0)
+

Run a number of generic single queries asynchronously +(not conversational)

+

NOTE: you need to call this using the await command in ipython or +jupyter, e.g.: out = await Summary.run_async()

+
+
Parameters:
+
    +
  • query (list) – Questions to ask ChatGPT (list of strings)

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • ignore_error (None | callable) – Optional callable to parse API error string. If the callable +returns True, the error will be ignored, the API call will not be +tried again, and the output will be an empty string.

  • +
  • rate_limit (float) – OpenAI API rate limit (tokens / minute). Note that the +gpt-3.5-turbo limit is 90k as of 4/2023, but we’re using a large +factor of safety (~1/2) because we can only count the tokens on the +input side and assume the output is about the same count.

  • +
+
+
Returns:
+

response (list) – Model responses with same length as query input.

+
+
+
+ +
+
+generic_query(query, model_role=None, temperature=0)
+

Ask a generic single query without conversation

+
+
Parameters:
+
    +
  • query (str) – Question to ask ChatGPT

  • +
  • model_role (str | None) – Role for the model to take, e.g.: “You are a research assistant”. +This defaults to self.MODEL_ROLE

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
+
+
Returns:
+

response (str) – Model response

+
+
+
+ +
+
+classmethod get_embedding(text)
+

Get the 1D array (list) embedding of a text string.

+
+
Parameters:
+

text (str) – Text to embed

+
+
Returns:
+

embedding (list) – List of float that represents the numerical embedding of the text

+
+
+
+ +
+
+make_ref_list(idx)[source]
+

Make a reference list

+
+
Parameters:
+

used_index (np.ndarray) – Indices of the used text from the text corpus

+
+
Returns:
+

ref_list (list) – A list of references (strs) used.

+
+
+
+ +
+
+chat(query, debug=True, stream=True, temperature=0, convo=False, token_budget=None, new_info_threshold=0.7, print_references=False, return_chat_obj=False)[source]
+

Answers a query by doing a semantic search of relevant text with +embeddings and then sending engineered query to the LLM.

+
+
Parameters:
+
    +
  • query (str) – Question being asked of EnergyWizard

  • +
  • debug (bool) – Flag to return extra diagnostics on the engineered question.

  • +
  • stream (bool) – Flag to print subsequent chunks of the response in a streaming +fashion

  • +
  • temperature (float) – GPT model temperature, a measure of response entropy from 0 to 1. 0 +is more reliable and nearly deterministic; 1 will give the model +more creative freedom and may not return as factual of results.

  • +
  • convo (bool) – Flag to perform semantic search with full conversation history +(True) or just the single query (False). Call EnergyWizard.clear() +to reset the chat history.

  • +
  • token_budget (int) – Option to override the class init token budget.

  • +
  • new_info_threshold (float) – New text added to the engineered query must contain at least this +much new information. This helps prevent (for example) the table of +contents being added multiple times.

  • +
  • print_references (bool) – Flag to print references if EnergyWizard is initialized with a +valid ref_col.

  • +
  • return_chat_obj (bool) – Flag to only return the ChatCompletion from OpenAI API.

  • +
+
+
Returns:
+

    +
  • response (str) – GPT output / answer.

  • +
  • query (str) – If debug is True, the engineered query asked of GPT will also be +returned here

  • +
  • references (list) – If debug is True, the list of references (strs) used in the +engineered prompt is returned here

  • +
+

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_autosummary/elm.wizard.html b/_autosummary/elm.wizard.html new file mode 100644 index 00000000..e51bd515 --- /dev/null +++ b/_autosummary/elm.wizard.html @@ -0,0 +1,190 @@ + + + + + + + elm.wizard — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

elm.wizard

+

ELM energy wizard

+

Classes

+ + + + + + +

EnergyWizard(corpus[, model, token_budget, ...])

Interface to ask OpenAI LLMs about energy research.

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/base.html b/_modules/elm/base.html new file mode 100644 index 00000000..b4a4f5d7 --- /dev/null +++ b/_modules/elm/base.html @@ -0,0 +1,760 @@ + + + + + + elm.base — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.base

+# -*- coding: utf-8 -*-
+"""
+ELM abstract class for API calls
+"""
+from abc import ABC
+import numpy as np
+import asyncio
+import aiohttp
+import openai
+import requests
+import tiktoken
+import time
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class ApiBase(ABC): + """Class to parse text from a PDF document.""" + + DEFAULT_MODEL = 'gpt-3.5-turbo' + """Default model to do pdf text cleaning.""" + + EMBEDDING_MODEL = 'text-embedding-ada-002' + """Default model to do text embeddings.""" + + EMBEDDING_URL = 'https://api.openai.com/v1/embeddings' + """OpenAI embedding API URL""" + + URL = 'https://api.openai.com/v1/chat/completions' + """OpenAI API API URL""" + + HEADERS = {"Content-Type": "application/json", + "Authorization": f"Bearer {openai.api_key}", + "api-key": f"{openai.api_key}", + } + """OpenAI API Headers""" + + MODEL_ROLE = "You are a research assistant that answers questions." + """High level model role""" + + def __init__(self, model=None): + """ + Parameters + ---------- + model : None | str + Optional specification of OpenAI model to use. Default is + cls.DEFAULT_MODEL + """ + self.model = model or self.DEFAULT_MODEL + self.api_queue = None + self.messages = [] + self.clear() + + @property + def all_messages_txt(self): + """Get a string printout of the full conversation with the LLM + + Returns + ------- + str + """ + messages = [f"{msg['role'].upper()}: {msg['content']}" + for msg in self.messages] + messages = '\n\n'.join(messages) + return messages + +
+[docs] + def clear(self): + """Clear chat history and reduce messages to just the initial model + role message.""" + self.messages = [{"role": "system", "content": self.MODEL_ROLE}]
+ + +
+[docs] + @staticmethod + async def call_api(url, headers, request_json): + """Make an asyncronous OpenAI API call. + + Parameters + ---------- + url : str + OpenAI API url, typically either: + https://api.openai.com/v1/embeddings + https://api.openai.com/v1/chat/completions + headers : dict + OpenAI API headers, typically: + {"Content-Type": "application/json", + "Authorization": f"Bearer {openai.api_key}"} + request_json : dict + API data input, typically looks like this for chat completion: + {"model": "gpt-3.5-turbo", + "messages": [{"role": "system", "content": "You do this..."}, + {"role": "user", "content": "Do this: {}"}], + "temperature": 0.0} + + Returns + ------- + out : dict + API response in json format + """ + + out = None + kwargs = dict(url=url, headers=headers, json=request_json) + try: + async with aiohttp.ClientSession() as session: + async with session.post(**kwargs) as response: + out = await response.json() + + except Exception as e: + logger.debug(f'Error in OpenAI API call from ' + f'`aiohttp.ClientSession().post(**kwargs)` with ' + f'kwargs: {kwargs}') + logger.exception('Error in OpenAI API call! Turn on debug logging ' + 'to see full query that caused error.') + out = {'error': str(e)} + + return out
+ + +
+[docs] + async def call_api_async(self, url, headers, all_request_jsons, + ignore_error=None, rate_limit=40e3): + """Use GPT to clean raw pdf text in parallel calls to the OpenAI API. + + NOTE: you need to call this using the await command in ipython or + jupyter, e.g.: `out = await PDFtoTXT.clean_txt_async()` + + Parameters + ---------- + url : str + OpenAI API url, typically either: + https://api.openai.com/v1/embeddings + https://api.openai.com/v1/chat/completions + headers : dict + OpenAI API headers, typically: + {"Content-Type": "application/json", + "Authorization": f"Bearer {openai.api_key}"} + all_request_jsons : list + List of API data input, one entry typically looks like this for + chat completion: + {"model": "gpt-3.5-turbo", + "messages": [{"role": "system", "content": "You do this..."}, + {"role": "user", "content": "Do this: {}"}], + "temperature": 0.0} + ignore_error : None | callable + Optional callable to parse API error string. If the callable + returns True, the error will be ignored, the API call will not be + tried again, and the output will be an empty string. + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + gpt-3.5-turbo limit is 90k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + + Returns + ------- + out : list + List of API outputs where each list entry is a GPT answer from the + corresponding message in the all_request_jsons input. + """ + self.api_queue = ApiQueue(url, headers, all_request_jsons, + ignore_error=ignore_error, + rate_limit=rate_limit) + out = await self.api_queue.run() + return out
+ + +
+[docs] + def chat(self, query, temperature=0): + """Have a continuous chat with the LLM including context from previous + chat() calls stored as attributes in this class. + + Parameters + ---------- + query : str + Question to ask ChatGPT + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + + Returns + ------- + response : str + Model response + """ + + self.messages.append({"role": "user", "content": query}) + + kwargs = dict(model=self.model, + messages=self.messages, + temperature=temperature, + stream=False) + if 'azure' in str(openai.api_type).lower(): + kwargs['engine'] = self.model + + response = openai.ChatCompletion.create(**kwargs) + response = response["choices"][0]["message"]["content"] + self.messages.append({'role': 'assistant', 'content': response}) + + return response
+ + +
+[docs] + def generic_query(self, query, model_role=None, temperature=0): + """Ask a generic single query without conversation + + Parameters + ---------- + query : str + Question to ask ChatGPT + model_role : str | None + Role for the model to take, e.g.: "You are a research assistant". + This defaults to self.MODEL_ROLE + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + + Returns + ------- + response : str + Model response + """ + + model_role = model_role or self.MODEL_ROLE + messages = [{"role": "system", "content": model_role}, + {"role": "user", "content": query}] + kwargs = dict(model=self.model, + messages=messages, + temperature=temperature, + stream=False) + + if 'azure' in str(openai.api_type).lower(): + kwargs['engine'] = self.model + + response = openai.ChatCompletion.create(**kwargs) + response = response["choices"][0]["message"]["content"] + return response
+ + +
+[docs] + async def generic_async_query(self, queries, model_role=None, + temperature=0, ignore_error=None, + rate_limit=40e3): + """Run a number of generic single queries asynchronously + (not conversational) + + NOTE: you need to call this using the await command in ipython or + jupyter, e.g.: `out = await Summary.run_async()` + + Parameters + ---------- + query : list + Questions to ask ChatGPT (list of strings) + model_role : str | None + Role for the model to take, e.g.: "You are a research assistant". + This defaults to self.MODEL_ROLE + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + ignore_error : None | callable + Optional callable to parse API error string. If the callable + returns True, the error will be ignored, the API call will not be + tried again, and the output will be an empty string. + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + gpt-3.5-turbo limit is 90k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + + Returns + ------- + response : list + Model responses with same length as query input. 
+ """ + + model_role = model_role or self.MODEL_ROLE + all_request_jsons = [] + for msg in queries: + msg = [{'role': 'system', 'content': self.MODEL_ROLE}, + {'role': 'user', 'content': msg}] + req = {"model": self.model, "messages": msg, + "temperature": temperature} + all_request_jsons.append(req) + + self.api_queue = ApiQueue(self.URL, self.HEADERS, all_request_jsons, + ignore_error=ignore_error, + rate_limit=rate_limit) + out = await self.api_queue.run() + + for i, response in enumerate(out): + choice = response.get('choices', [{'message': {'content': ''}}])[0] + message = choice.get('message', {'content': ''}) + content = message.get('content', '') + if not any(content): + logger.error(f'Received no output for query {i + 1}!') + else: + out[i] = content + + return out
+ + +
+[docs] + @classmethod + def get_embedding(cls, text): + """Get the 1D array (list) embedding of a text string. + + Parameters + ---------- + text : str + Text to embed + + Returns + ------- + embedding : list + List of float that represents the numerical embedding of the text + """ + kwargs = dict(url=cls.EMBEDDING_URL, + headers=cls.HEADERS, + json={'model': cls.EMBEDDING_MODEL, + 'input': text}) + + out = requests.post(**kwargs) + embedding = out.json() + + try: + embedding = embedding["data"][0]["embedding"] + except Exception as exc: + msg = ('Embedding request failed: {} {}' + .format(out.reason, embedding)) + logger.error(msg) + raise RuntimeError(msg) from exc + + return embedding
+ + +
+[docs] + @staticmethod + def count_tokens(text, model): + """Return the number of tokens in a string. + + Parameters + ---------- + text : str + Text string to get number of tokens for + model : str + specification of OpenAI model to use (e.g., "gpt-3.5-turbo") + + Returns + ------- + n : int + Number of tokens in text + """ + + # Optional mappings for weird azure names to tiktoken/openai names + tokenizer_aliases = {'gpt-35-turbo': 'gpt-3.5-turbo', + 'gpt-4-32k': 'gpt-4-32k-0314' + } + + token_model = tokenizer_aliases.get(model, model) + encoding = tiktoken.encoding_for_model(token_model) + + return len(encoding.encode(text))
+
+ + + +
+[docs] +class ApiQueue: + """Class to manage the parallel API queue and submission""" + + def __init__(self, url, headers, request_jsons, ignore_error=None, + rate_limit=40e3, max_retries=10): + """ + Parameters + ---------- + url : str + OpenAI API url, typically either: + https://api.openai.com/v1/embeddings + https://api.openai.com/v1/chat/completions + headers : dict + OpenAI API headers, typically: + {"Content-Type": "application/json", + "Authorization": f"Bearer {openai.api_key}"} + all_request_jsons : list + List of API data input, one entry typically looks like this for + chat completion: + {"model": "gpt-3.5-turbo", + "messages": [{"role": "system", "content": "You do this..."}, + {"role": "user", "content": "Do this: {}"}], + "temperature": 0.0} + ignore_error : None | callable + Optional callable to parse API error string. If the callable + returns True, the error will be ignored, the API call will not be + tried again, and the output will be an empty string. + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + gpt-3.5-turbo limit is 90k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + max_retries : int + Number of times to retry an API call with an error response before + raising an error. 
+ """ + + self.url = url + self.headers = headers + self.request_jsons = request_jsons + self.ignore_error = ignore_error + self.rate_limit = rate_limit + self.max_retries = max_retries + self.api_jobs = None + self.todo = None + self.out = None + self.errors = None + self.tries = None + self._retry = False + self._tsub = 0 + self._reset() + self.job_names = [f'job_{str(ijob).zfill(4)}' + for ijob in range(len(request_jsons))] + + def _reset(self): + self.api_jobs = {} + self.todo = [True] * len(self) + self.out = [None] * len(self) + self.errors = [None] * len(self) + self.tries = np.zeros(len(self), dtype=int) + self._retry = False + self._tsub = 0 + + def __len__(self): + """Number of API calls to submit""" + return len(self.request_jsons) + + @property + def waiting_on(self): + """Get a list of async jobs that are being waited on.""" + return [job for ijob, job in self.api_jobs.items() if self.todo[ijob]] + +
+[docs] + def submit_jobs(self): + """Submit a subset jobs asynchronously and hold jobs in the `api_jobs` + attribute. Break when the `rate_limit` is exceeded.""" + + token_count = 0 + t_elap = (time.time() - self._tsub) / 60 + avail_tokens = self.rate_limit * t_elap + avail_tokens = min(self.rate_limit, avail_tokens) + + for ijob, itodo in enumerate(self.todo): + if (ijob not in self.api_jobs + and itodo + and token_count < avail_tokens): + request = self.request_jsons[ijob] + model = request['model'] + tokens = ApiBase.count_tokens(str(request), model) + + if tokens > self.rate_limit: + msg = ('Job index #{} with has {} tokens which ' + 'is greater than the rate limit of {}!' + .format(ijob, tokens, self.rate_limit)) + logger.error(msg) + raise RuntimeError(msg) + + elif tokens < avail_tokens: + token_count += tokens + task = asyncio.create_task(ApiBase.call_api(self.url, + self.headers, + request), + name=self.job_names[ijob]) + self.api_jobs[ijob] = task + self.tries[ijob] += 1 + self._tsub = time.time() + + logger.debug('Submitted "{}" ({} out of {}). ' + 'Token count: {} ' + '(rate limit is {}). ' + 'Attempts: {}' + .format(self.job_names[ijob], + ijob + 1, len(self), token_count, + self.rate_limit, + self.tries[ijob])) + + elif token_count >= avail_tokens: + token_count = 0 + break
+ + +
+[docs] + async def collect_jobs(self): + """Collect asyncronous API calls and API outputs. Store outputs in the + `out` attribute.""" + + if not any(self.waiting_on): + return + + complete, _ = await asyncio.wait(self.waiting_on, + return_when=asyncio.FIRST_COMPLETED) + + for job in complete: + job_name = job.get_name() + ijob = self.job_names.index(job_name) + task_out = job.result() + + if 'error' in task_out: + msg = ('Received API error for task #{0} ' + '(see `ApiQueue.errors[{1}]` and ' + '`ApiQueue.request_jsons[{1}]` for more details). ' + 'Error message: {2}'.format(ijob + 1, ijob, task_out)) + self.errors[ijob] = 'Error: {}'.format(task_out) + + if (self.ignore_error is not None + and self.ignore_error(str(task_out))): + msg += ' Ignoring error and moving on.' + dummy = {'choices': [{'message': {'content': ''}}]} + self.out[ijob] = dummy + self.todo[ijob] = False + else: + del self.api_jobs[ijob] + msg += ' Retrying query.' + self._retry = True + logger.error(msg) + + else: + self.out[ijob] = task_out + self.todo[ijob] = False + + n_complete = len(self) - sum(self.todo) + logger.debug('Finished {} API calls, {} left' + .format(n_complete, sum(self.todo)))
+ + +
+[docs] + async def run(self): + """Run all asyncronous API calls. + + Returns + ------- + out : list + List of API call outputs with same ordering as `request_jsons` + input. + """ + + self._reset() + logger.debug('Submitting async API calls...') + + i = 0 + while any(self.todo): + i += 1 + self._retry = False + self.submit_jobs() + await self.collect_jobs() + + if any(self.tries > self.max_retries): + msg = (f'Hit {self.max_retries} retries on API queries. ' + 'Stopping. See `ApiQueue.errors` for more ' + 'details on error response') + logger.error(msg) + raise RuntimeError(msg) + elif self._retry: + time.sleep(10) + elif i > 1e4: + raise RuntimeError('Hit 1e4 iterations. What are you doing?') + elif any(self.todo): + time.sleep(5) + + return self.out
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/chunk.html b/_modules/elm/chunk.html new file mode 100644 index 00000000..7c3512d5 --- /dev/null +++ b/_modules/elm/chunk.html @@ -0,0 +1,455 @@ + + + + + + elm.chunk — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.chunk

+# -*- coding: utf-8 -*-
+"""
+Utility to break text up into overlapping chunks.
+"""
+import copy
+from elm.base import ApiBase
+
+
+
+[docs] +class Chunker(ApiBase): + """ + Class to break text up into overlapping chunks + + NOTE: very large paragraphs that exceed the tokens per chunk will not be + split up and will still be padded with overlap. + """ + + def __init__(self, text, tag=None, tokens_per_chunk=500, overlap=1, + split_on='\n\n'): + """ + Parameters + ---------- + text : str + Single body of text to break up. Works well if this is a single + document with empty lines between paragraphs. + tag : None | str + Optional reference tag to include at the beginning of each text + chunk + tokens_per_chunk : float + Nominal token count per text chunk. Overlap paragraphs will exceed + this. + overlap : int + Number of paragraphs to overlap between chunks + split_on : str + Sub string to split text into paragraphs. + """ + + super().__init__() + + self._split_on = split_on + self._idc = 0 # iter index for chunk + self.text = self.clean_paragraphs(text) + self.tag = tag + self.tokens_per_chunk = tokens_per_chunk + self.overlap = overlap + self._paragraphs = None + self._ptokens = None + self._ctokens = None + self._chunks = self.chunk_text() + + def __getitem__(self, i): + """Get a chunk index + + Returns + ------- + str + """ + return self.chunks[i] + + def __iter__(self): + self._idc = 0 + return self + + def __next__(self): + """Iterator returns one of the text chunks at a time + + Returns + ------- + str + """ + + if self._idc >= len(self): + raise StopIteration + + out = self.chunks[self._idc] + self._idc += 1 + return out + + def __len__(self): + """Number of text chunks + + Return + ------ + int + """ + return len(self.chunks) + + @property + def chunks(self): + """List of overlapping text chunks (strings). + + Returns + ------- + list + """ + return self._chunks + + @property + def paragraphs(self): + """Get a list of paragraphs in the text demarkated by an empty line. 
+ + Returns + ------- + list + """ + if self._paragraphs is None: + self._paragraphs = self.text.split(self._split_on) + self._paragraphs = [p for p in self._paragraphs + if self.is_good_paragraph(p)] + return self._paragraphs + +
+[docs] + @staticmethod + def clean_paragraphs(text): + """Clean up double line breaks to make sure paragraphs can be detected + in the text.""" + previous_len = len(text) + while True: + text = text.replace('\n ', '\n') + if len(text) == previous_len: + break + else: + previous_len = len(text) + return text
+ + +
+[docs] + @staticmethod + def is_good_paragraph(paragraph): + """Basic tests to make sure the paragraph is useful text.""" + if '.....' in paragraph: + return False + elif paragraph.strip().isnumeric(): + return False + else: + return True
+ + + @property + def paragraph_tokens(self): + """Number of tokens per paragraph. + + Returns + ------- + list + """ + if self._ptokens is None: + self._ptokens = [self.count_tokens(p, self.model) + for p in self.paragraphs] + return self._ptokens + + @property + def chunk_tokens(self): + """Number of tokens per chunk. + + Returns + ------- + list + """ + if self._ctokens is None: + self._ctokens = [self.count_tokens(c, self.model) + for c in self.chunks] + return self._ctokens + +
+[docs] + def merge_chunks(self, chunks_input): + """Merge chunks until they reach the token limit per chunk. + + Parameters + ---------- + chunks_input : list + List of list of integers: [[0, 1], [2], [3, 4]] where nested lists + are chunks and the integers are paragraph indices + + Returns + ------- + chunks : list + List of list of integers: [[0, 1], [2], [3, 4]] where nested lists + are chunks and the integers are paragraph indices + """ + + chunks = copy.deepcopy(chunks_input) + + for i in range(len(chunks) - 1): + chunk0 = chunks[i] + chunk1 = chunks[i + 1] + if chunk0 is not None and chunk1 is not None: + tcount0 = sum(self.paragraph_tokens[j] for j in chunk0) + tcount1 = sum(self.paragraph_tokens[j] for j in chunk1) + if tcount0 + tcount1 < self.tokens_per_chunk: + chunk0 += chunk1 + chunks[i] = chunk0 + chunks[i + 1] = None + + chunks = [c for c in chunks if c is not None] + flat_chunks = [a for b in chunks for a in b] + + assert all(c in list(range(len(self.paragraphs))) for c in flat_chunks) + + return chunks
+ + +
+[docs] + def add_overlap(self, chunks_input): + """Add overlap on either side of a text chunk. This ignores token + limit. + + Parameters + ---------- + chunks_input : list + List of list of integers: [[0, 1], [2], [3, 4]] where nested lists + are chunks and the integers are paragraph indices + + Returns + ------- + chunks : list + List of list of integers: [[0, 1], [2], [3, 4]] where nested lists + are chunks and the integers are paragraph indices + """ + + if len(chunks_input) == 1 or self.overlap == 0: + return chunks_input + + chunks = copy.deepcopy(chunks_input) + + for i, chunk1 in enumerate(chunks_input): + + if i == 0: + chunk2 = chunks_input[i + 1] + chunk1 = chunk1 + chunk2[:self.overlap] + + elif i == len(chunks) - 1: + chunk0 = chunks_input[i - 1] + chunk1 = chunk0[-self.overlap:] + chunk1 + + else: + chunk0 = chunks_input[i - 1] + chunk2 = chunks_input[i + 1] + chunk1 = (chunk0[-self.overlap:] + + chunk1 + + chunk2[:self.overlap]) + + chunks[i] = chunk1 + + return chunks
+ + +
+[docs] + def chunk_text(self): + """Perform the text chunking operation + + Returns + ------- + chunks : list + List of strings where each string is an overlapping chunk of text + """ + + chunks_input = [[i] for i in range(len(self.paragraphs))] + while True: + chunks = self.merge_chunks(chunks_input) + if chunks == chunks_input: + break + else: + chunks_input = copy.deepcopy(chunks) + + chunks = self.add_overlap(chunks) + text_chunks = [] + for chunk in chunks: + paragraphs = [self.paragraphs[c] for c in chunk] + text_chunks.append(self._split_on.join(paragraphs)) + + if self.tag is not None: + text_chunks = [self.tag + '\n\n' + chunk for chunk in text_chunks] + + return text_chunks
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/embed.html b/_modules/elm/embed.html new file mode 100644 index 00000000..7b145b1d --- /dev/null +++ b/_modules/elm/embed.html @@ -0,0 +1,350 @@ + + + + + + elm.embed — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.embed

+# -*- coding: utf-8 -*-
+"""
+ELM text embedding
+"""
+import openai
+import re
+import os
+import logging
+
+from elm.base import ApiBase
+from elm.chunk import Chunker
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class ChunkAndEmbed(ApiBase): + """Class to chunk text data and create embeddings""" + + DEFAULT_MODEL = 'text-embedding-ada-002' + """Default model to do embeddings.""" + + def __init__(self, text, model=None, **chunk_kwargs): + """ + Parameters + ---------- + text : str + Single continuous piece of text to chunk up by paragraph and embed + or filepath to .txt file containing one piece of text. + model : None | str + Optional specification of OpenAI model to use. Default is + cls.DEFAULT_MODEL + chunk_kwargs : dict | None + kwargs for initialization of :class:`elm.chunk.Chunker` + """ + + super().__init__(model) + + self.text = text + + if os.path.isfile(text): + logger.info('Loading text file: {}'.format(text)) + with open(text, 'r') as f: + self.text = f.read() + + assert isinstance(self.text, str) + self.text = self.clean_tables(self.text) + + self.text_chunks = Chunker(self.text, **chunk_kwargs) + +
+[docs] + @staticmethod + def clean_tables(text): + """Make sure that table headers are in the same paragraph as the table + itself. Typically, tables are looked for with pipes and hyphens, which + is how GPT cleans tables in text.""" + + # looks for "Table N.", should expand to other formats with additional + # regex patterns later + table_regex = r"^Table [0-9]+." + + lines = text.split('\n') + for idx, line in enumerate(lines[:-2]): + next_line_1 = lines[idx + 1] + next_line_2 = lines[idx + 2] + match = re.search(table_regex, line) + cond1 = match is not None + cond2 = next_line_1.strip() == '' + cond3 = next_line_2.startswith('|') + + if all([cond1, cond2, cond3]): + lines[idx + 1] = line + lines[idx] = '' + + return '\n'.join(lines)
+ + +
+[docs] + def run(self, rate_limit=175e3): + """Run text embedding in serial + + Parameters + ---------- + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + embedding limit is 350k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + + Returns + ------- + embedding : list + List of 1D arrays representing the embeddings for all text chunks + """ + + logger.info('Embedding {} text chunks...' + .format(len(self.text_chunks))) + + embeddings = [] + for i, chunk in enumerate(self.text_chunks): + req = {"input": chunk, "model": self.model} + + if 'azure' in str(openai.api_type).lower(): + req['engine'] = self.model + + out = self.call_api(self.EMBEDDING_URL, self.HEADERS, req) + + try: + out = out['data'][0]['embedding'] + embeddings.append(out) + except Exception: + msg = ('Could not get embeddings for chunk {}, ' + 'received API response: {}'.format(i + 1, out)) + logger.error(msg) + embeddings.append(None) + + logger.info('Finished all embeddings.') + + return embeddings
+ + +
+[docs] + async def run_async(self, rate_limit=175e3): + """Run text embedding on chunks asynchronously + + NOTE: you need to call this using the await command in ipython or + jupyter, e.g.: `out = await ChunkAndEmbed.run_async()` + + Parameters + ---------- + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + embedding limit is 350k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + + Returns + ------- + embedding : list + List of 1D arrays representing the embeddings for all text chunks + """ + + logger.info('Embedding {} text chunks...' + .format(len(self.text_chunks))) + + all_request_jsons = [] + for chunk in self.text_chunks: + req = {"input": chunk, "model": self.model} + + if 'azure' in str(openai.api_type).lower(): + req['engine'] = self.model + + all_request_jsons.append(req) + + embeddings = await self.call_api_async(self.EMBEDDING_URL, + self.HEADERS, + all_request_jsons, + rate_limit=rate_limit) + + for i, chunk in enumerate(embeddings): + try: + embeddings[i] = chunk['data'][0]['embedding'] + except Exception: + msg = ('Could not get embeddings for chunk {}, ' + 'received API response: {}'.format(i + 1, chunk)) + logger.error(msg) + embeddings[i] = None + + logger.info('Finished all embeddings.') + + return embeddings
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/osti.html b/_modules/elm/osti.html new file mode 100644 index 00000000..841aef72 --- /dev/null +++ b/_modules/elm/osti.html @@ -0,0 +1,496 @@ + + + + + + elm.osti — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.osti

+# -*- coding: utf-8 -*-
+"""
+Utilities for retrieving data from OSTI.
+"""
+import copy
+import requests
+import json
+import os
+import pandas as pd
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class OstiRecord(dict): + """Class to handle a single OSTI record as dictionary data""" + + def __init__(self, record): + """ + Parameters + ---------- + record : dict + OSTI record in dict form, typically a response from OSTI API. + """ + assert isinstance(record, dict) + super().__init__(**record) + +
+[docs] + @staticmethod + def strip_nested_brackets(text): + """Remove text between brackets/parentheses for cleaning OSTI text""" + ret = '' + skip1c = 0 + skip2c = 0 + for i in text: + if i == '[': + skip1c += 1 + elif i == '(': + skip2c += 1 + elif i == ']' and skip1c > 0: + skip1c -= 1 + elif i == ')' and skip2c > 0: + skip2c -= 1 + elif skip1c == 0 and skip2c == 0: + ret += i + return ret
+ + + @property + def authors(self): + """Get the list of authors of this record. + + Returns + ------- + str + """ + au = copy.deepcopy(self.get('authors', None)) + if au is not None: + for i, name in enumerate(au): + name = self.strip_nested_brackets(name) + if name.count(',') == 1: + second, first = name.split(',') + name = f'{first.strip()} {second.strip()}' + au[i] = name + au = ', '.join(au) + return au + + @property + def title(self): + """Get the title of this record + + Returns + ------- + str | None + """ + return self.get('title', None) + + @property + def year(self): + """Get the year of publication of this record + + Returns + ------- + str | None + """ + year = self.get('publication_date', None) + if year is not None: + year = year.split('-')[0] + year = str(year) + return year + + @property + def date(self): + """Get the date of publication of this record + + Returns + ------- + str | None + """ + date = self.get('publication_date', None) + if date is not None: + date = date.split('T')[0] + date = str(date) + return date + + @property + def doi(self): + """Get the DOI of this record + + Returns + ------- + str | None + """ + return self.get('doi', None) + + @property + def osti_id(self): + """Get the OSTI ID of this record which is typically a 7 digit number + + Returns + ------- + str | None + """ + return self.get('osti_id', None) + + @property + def url(self): + """Get the download URL of this record + + Returns + ------- + str | None + """ + url = None + for link in self['links']: + if link.get('rel', None) == 'fulltext': + url = link.get('href', None) + break + return url + +
+[docs] + def download(self, fp): + """Download the PDF of this record + + Parameters + ---------- + fp : str + Filepath to download this record to, typically a .pdf + """ + # OSTI returns citation on first query and pdf on second (weird) + session = requests.Session() + response = session.get(self.url) + response = session.get(self.url) + with open(fp, 'wb') as f_pdf: + f_pdf.write(response.content)
+
+ + + +
+[docs] +class OstiList(list): + """Class to retrieve and handle multiple OSTI records from an API URL.""" + + BASE_URL = 'https://www.osti.gov/api/v1/records' + """Base OSTI API URL. This can be appended with search parameters""" + + def __init__(self, url, n_pages=1): + """ + Parameters + ---------- + url : str + OSTI API URL to request, see this for details: + https://www.osti.gov/api/v1/docs + n_pages : int + Number of pages to get from the API. Typical response has 20 + entries per page. Default of 1 ensures that this class doesnt hang + on a million responses. + """ + + self.url = url + self._session = requests.Session() + self._response = None + self._n_pages = 0 + self._iter = 0 + + records = self._get_first() + for page in self._get_pages(n_pages=n_pages): + records += page + records = [OstiRecord(single) for single in records] + super().__init__(records) + + def _get_first(self): + """Get the first page of OSTI records + + Returns + ------- + list + """ + self._response = self._session.get(self.url) + + if not self._response.ok: + msg = ('OSTI API Request got error {}: "{}"' + .format(self._response.status_code, + self._response.reason)) + raise RuntimeError(msg) + first_page = self._response.json() + + self._n_pages = 1 + if 'last' in self._response.links: + url = self._response.links['last']['url'] + self._n_pages = int(url.split('page=')[-1]) + + logger.debug('Found approximately {} records.' + .format(self._n_pages * len(first_page))) + + return first_page + + def _get_pages(self, n_pages): + """Get response pages up to n_pages from OSTI. 
+ + Parameters + ---------- + n_pages : int + Number of pages to retrieve + + Returns + ------- + next_pages : list + This function will return a generator of next pages, each of which + is a list of OSTI records + """ + if n_pages > 1: + for page in range(2, self._n_pages + 1): + if page <= n_pages: + next_page = self._session.get(self.url, + params={'page': page}) + next_page = next_page.json() + yield next_page + else: + break + +
+[docs] + def download(self, out_dir): + """Download all PDFs from the records in this OSTI object into a + directory. PDFs will be given file names based on their OSTI record ID + + Parameters + ---------- + out_dir : str + Directory to download PDFs to. This directory will be created if it + does not already exist. + """ + logger.info('Downloading {} records to: {}'.format(len(self), out_dir)) + os.makedirs(out_dir, exist_ok=True) + for record in self: + fp_out = os.path.join(out_dir, record.osti_id + '.pdf') + if not os.path.exists(fp_out): + try: + record.download(fp_out) + except Exception as e: + logger.exception('Could not download OSTI ID {} "{}": {}' + .format(record.osti_id, record.title, e)) + logger.info('Finished download!')
+ + + @property + def meta(self): + """Get a meta dataframe with details on all of the OSTI records. + + Returns + ------- + pd.DataFrame + """ + i = 0 + attrs = ('authors', 'title', 'year', 'date', 'doi', 'osti_id', 'url') + df = pd.DataFrame(columns=attrs) + for record in self: + for attr in attrs: + out = getattr(record, attr) + if not isinstance(out, str): + out = json.dumps(out) + df.at[i, attr] = out + df.at[i, 'fp'] = f'./osti_pdfs/{record.osti_id}.pdf' + df.at[i, 'fn'] = f'{record.osti_id}.pdf' + i += 1 + return df + +
+[docs] + @classmethod + def from_osti_ids(cls, oids): + """Initialize OSTI records from one or more numerical IDS + + Parameters + ---------- + oids : list + List of string or integer OSTI IDs which are typically 7 digit + numbers + + Returns + ------- + out : OstiList + OstiList object with entries for each oid input. + """ + if not isinstance(oids, (list, tuple)): + oids = [oids] + oids = [str(oid) for oid in oids] + out = None + for oid in oids: + iout = cls(cls.BASE_URL + '/' + oid) + if out is None: + out = iout + else: + out += iout + return out
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/pdf.html b/_modules/elm/pdf.html new file mode 100644 index 00000000..10fbb1ac --- /dev/null +++ b/_modules/elm/pdf.html @@ -0,0 +1,638 @@ + + + + + + elm.pdf — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.pdf

+# -*- coding: utf-8 -*-
+"""
+ELM PDF to text parser
+"""
+import os
+import subprocess
+import numpy as np
+import requests
+import tempfile
+import copy
+from PyPDF2 import PdfReader
+import logging
+
+from elm.base import ApiBase
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class PDFtoTXT(ApiBase): + """Class to parse text from a PDF document.""" + + MODEL_ROLE = ('You clean up poorly formatted text ' + 'extracted from PDF documents.') + """High level model role.""" + + MODEL_INSTRUCTION = ('Text extracted from a PDF: ' + '\n"""\n{}\n"""\n\n' + 'The text above was extracted from a PDF document. ' + 'Can you make it nicely formatted? ' + 'Please only return the formatted text ' + 'without comments or added information.') + """Instructions to the model with python format braces for pdf text""" + + def __init__(self, fp, page_range=None, model=None): + """ + Parameters + ---------- + fp : str + Filepath to .pdf file to extract. + page_range : None | list + Optional 2-entry list/tuple to set starting and ending pages + (python indexing) + model : None | str + Optional specification of OpenAI model to use. Default is + cls.DEFAULT_MODEL + """ + super().__init__(model) + self.fp = fp + self.raw_pages = self.load_pdf(page_range) + self.pages = self.raw_pages + self.full = self.combine_pages(self.raw_pages) + +
+[docs] + def load_pdf(self, page_range): + """Basic load of pdf to text strings + + Parameters + ---------- + page_range : None | list + Optional 2-entry list/tuple to set starting and ending pages + (python indexing) + + Returns + ------- + out : list + List of strings where each entry is a page. This is the raw PDF + text before GPT cleaning + """ + + logger.info('Loading PDF: {}'.format(self.fp)) + out = [] + pdf = PdfReader(self.fp) + + if page_range is not None: + assert len(page_range) == 2 + page_range = slice(*page_range) + else: + page_range = slice(0, None) + + for i, page in enumerate(pdf.pages[page_range]): + page_text = page.extract_text() + if len(page_text.strip()) == 0: + logger.debug('Skipping empty page {} out of {}' + .format(i + 1 + page_range.start, len(pdf.pages))) + else: + out.append(page_text) + + logger.info('Finished loading PDF.') + return out
+ + +
+[docs] + def make_gpt_messages(self, pdf_raw_text): + """Make the chat completion messages list for input to GPT + + Parameters + ---------- + pdf_raw_text : str + Raw PDF text to be cleaned + + Returns + ------- + messages : list + Messages for OpenAI chat completion model. Typically this looks + like this: + [{"role": "system", "content": "You do this..."}, + {"role": "user", "content": "Please do this: {}"}] + """ + + query = self.MODEL_INSTRUCTION.format(pdf_raw_text) + messages = [{"role": "system", "content": self.MODEL_ROLE}, + {"role": "user", "content": query}] + + return messages
+ + +
+[docs] + def clean_txt(self): + """Use GPT to clean raw pdf text in serial calls to the OpenAI API. + + Returns + ------- + clean_pages : list + List of clean text strings where each list entry is a page from the + PDF + """ + + logger.info('Cleaning PDF text...') + clean_pages = [] + + for i, raw_page in enumerate(self.raw_pages): + msg = self.make_gpt_messages(copy.deepcopy(raw_page)) + req = {"model": self.model, "messages": msg, "temperature": 0.0} + + kwargs = dict(url=self.URL, headers=self.HEADERS, json=req) + + try: + response = requests.post(**kwargs) + response = response.json() + except Exception as e: + msg = 'Error in OpenAI API call!' + logger.exception(msg) + response = {'error': str(e)} + + choice = response.get('choices', [{'message': {'content': ''}}])[0] + message = choice.get('message', {'content': ''}) + content = message.get('content', '') + clean_pages.append(content) + logger.debug('Cleaned page {} out of {}' + .format(i + 1, len(self.raw_pages))) + + logger.info('Finished cleaning PDF.') + + self.pages = clean_pages + self.full = self.combine_pages(self.pages) + self.validate_clean() + + return clean_pages
+ + +
+[docs] + async def clean_txt_async(self, ignore_error=None, rate_limit=40e3): + """Use GPT to clean raw pdf text in parallel calls to the OpenAI API. + + NOTE: you need to call this using the await command in ipython or + jupyter, e.g.: `out = await PDFtoTXT.clean_txt_async()` + + Parameters + ---------- + ignore_error : None | callable + Optional callable to parse API error string. If the callable + returns True, the error will be ignored, the API call will not be + tried again, and the output will be an empty string. + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + gpt-3.5-turbo limit is 90k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + + Returns + ------- + clean_pages : list + List of clean text strings where each list entry is a page from the + PDF + """ + + logger.info('Cleaning PDF text asyncronously...') + + all_request_jsons = [] + for page in self.raw_pages: + msg = self.make_gpt_messages(page) + req = {"model": self.model, "messages": msg, "temperature": 0.0} + all_request_jsons.append(req) + + clean_pages = await self.call_api_async(self.URL, self.HEADERS, + all_request_jsons, + ignore_error=ignore_error, + rate_limit=rate_limit) + + for i, page in enumerate(clean_pages): + choice = page.get('choices', [{'message': {'content': ''}}])[0] + message = choice.get('message', {'content': ''}) + content = message.get('content', '') + clean_pages[i] = content + + logger.info('Finished cleaning PDF.') + + self.pages = clean_pages + self.full = self.combine_pages(self.pages) + self.validate_clean() + + return clean_pages
+ + +
+[docs] + def is_double_col(self, separator=' '): + """Does the text look like it has multiple vertical text columns? + + Parameters + ---------- + separator : str + Heuristic split string to look for spaces between columns + + Returns + ------- + out : bool + True if more than one vertical text column + """ + lines = self.full.split('\n') + n_cols = np.zeros(len(lines)) + for i, line in enumerate(lines): + columns = line.strip().split(separator) + n_cols[i] = len(columns) + return np.median(n_cols) >= 2
+ + +
+[docs] + def clean_poppler(self, layout=True): + """Clean the pdf using the poppler pdftotxt utility + + Requires the `pdftotext` command line utility from this software: + https://poppler.freedesktop.org/ + + Parameters + ---------- + layout : bool + Layout flag for poppler pdftotxt utility: "maintain original + physical layout". Layout=True works well for single column text, + layout=False collapses the double columns into single columns which + works better for downstream chunking and LLM work. + + Returns + ------- + out : str + Joined cleaned pages + """ + + with tempfile.TemporaryDirectory() as td: + fp_out = os.path.join(td, 'poppler_out.txt') + args = ['pdftotext', f"{self.fp}", f"{fp_out}"] + if layout: + args.insert(1, '-layout') + + if not os.path.exists(os.path.dirname(fp_out)): + os.makedirs(os.path.dirname(fp_out), exist_ok=True) + + stdout = subprocess.run(args, check=True, stdout=subprocess.PIPE) + if stdout.returncode != 0: + msg = ('Poppler raised return code {}: {}' + .format(stdout.returncode, stdout)) + logger.exception(msg) + raise RuntimeError(msg) + + with open(fp_out, 'r') as f: + clean_txt = f.read() + + # break on poppler page break + self.pages = clean_txt.split('\x0c') + remove = [] + for i, page in enumerate(self.pages): + if not any(page.strip()): + remove.append(i) + for i in remove[::-1]: + _ = self.pages.pop(i) + + self.full = self.combine_pages(self.pages) + + return self.full
+ + +
+[docs] + def validate_clean(self): + """Run some basic checks on the GPT cleaned text vs. the raw text""" + repl = ('\n', '.', ',', '-', '/', ':') + + if not any(self.full.replace('\n', '').strip()): + msg = 'Didnt get ANY clean output text!' + logger.error(msg) + raise RuntimeError(msg) + + def replace_chars_for_clean(text): + for char in repl: + text = text.replace(char, ' ') + return text + + for i, (raw, clean) in enumerate(zip(self.raw_pages, self.pages)): + raw_words = replace_chars_for_clean(raw).split(' ') + clean_words = replace_chars_for_clean(clean).split(' ') + + raw_words = {x for x in raw_words if len(x) > 2} + clean_words = {x for x in clean_words if len(x) > 2} + + isin = sum(x in clean_words for x in raw_words) + + perc = 100 + if isin > 0 and len(raw_words) > 0: + perc = 100 * isin / len(raw_words) + + if perc < 70: + logger.warning('Page {} of {} has a {:.2f}% match with {} ' + 'unique words in the raw text.' + .format(i + 1, len(self.raw_pages), perc, + len(raw_words))) + else: + logger.info('Page {} of {} has a {:.2f}% match with {} ' + 'unique words in the raw text.' + .format(i + 1, len(self.raw_pages), perc, + len(raw_words)))
+ + +
+[docs] + @staticmethod + def combine_pages(pages): + """Combine pages of GPT cleaned text into a single string. + + Parameters + ---------- + pages : list + List of clean text strings where each list entry is a page from the + PDF + + Returns + ------- + full : str + Single multi-page string + """ + full = '\n'.join(pages) + full = full.replace('\n•', '-') + full = full.replace('•', '-') + return full
+ + + def _get_nominal_headers(self, split_on, iheaders): + """Get nominal headers from a standard page. Aim for a "typical" page + that is likely to have a normal header, not the first or last. + + Parameters + ---------- + split_on : str + Chars to split lines of a page on + iheaders : list | tuple + Integer indices to look for headers after splitting a page into + lines based on split_on. This needs to go from the start of the + page to the end. + + Returns + ------- + headers : list + List of headers where each entry is a string header + """ + + headers = [None] * len(iheaders) + page_lens = np.array([len(p) for p in self.pages]) + median_len = np.median(page_lens) + ipage = np.argmin(np.abs(page_lens - median_len)) + page = self.pages[ipage] + for i, ih in enumerate(iheaders): + headers[i] = page.split(split_on)[ih] + + return headers + +
+[docs] + def clean_headers(self, char_thresh=0.6, page_thresh=0.8, split_on='\n', + iheaders=(0, 1, -2, -1)): + """Clean headers/footers that are duplicated across pages + + Parameters + ---------- + char_thresh : float + Fraction of characters in a given header that are similar between + pages to be considered for removal + page_thresh : float + Fraction of pages that share the header to be considered for + removal + split_on : str + Chars to split lines of a page on + iheaders : list | tuple + Integer indices to look for headers after splitting a page into + lines based on split_on. This needs to go from the start of the + page to the end. + + Returns + ------- + out : str + Clean text with all pages joined + """ + logger.info('Cleaning headers') + headers = self._get_nominal_headers(split_on, iheaders) + tests = np.zeros((len(self.pages), len(headers))) + + for ip, page in enumerate(self.pages): + for ih, header in zip(iheaders, headers): + pheader = '' + try: + pheader = page.split(split_on)[ih] + except IndexError: + pass + + harr = header.replace(' ', '') + parr = pheader.replace(' ', '') + + harr = harr.ljust(len(parr)) + parr = parr.ljust(len(harr)) + + harr = np.array([*harr]) + parr = np.array([*parr]) + assert len(harr) == len(parr) + + test = harr == parr + if len(test) == 0: + test = 1.0 + else: + test = test.sum() / len(test) + + tests[ip, ih] = test + + logger.debug('Header tests (page, iheader): \n{}'.format(tests)) + tests = (tests > char_thresh).sum(axis=0) / len(self.pages) + tests = (tests > page_thresh) + logger.debug('Header tests (iheader,): \n{}'.format(tests)) + + for ip, page in enumerate(self.pages): + page = page.split(split_on) + for i, iheader in enumerate(iheaders): + if tests[i] and len(page) > np.abs(iheader): + _ = page.pop(iheader) + + page = split_on.join(page) + self.pages[ip] = page + + self.full = self.combine_pages(self.pages) + return self.full
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/summary.html b/_modules/elm/summary.html new file mode 100644 index 00000000..04de5c31 --- /dev/null +++ b/_modules/elm/summary.html @@ -0,0 +1,367 @@ + + + + + + elm.summary — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.summary

+# -*- coding: utf-8 -*-
+"""
+Research Summarization and Distillation with LLMs
+"""
+import logging
+import os
+
+from elm.base import ApiBase
+from elm.chunk import Chunker
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class Summary(ApiBase): + """Interface to perform Recursive Summarization and Distillation of + research text""" + + MODEL_ROLE = "You are an energy scientist summarizing prior research" + """High level model role, somewhat redundant to MODEL_INSTRUCTION""" + + MODEL_INSTRUCTION = ('Can you please summarize the text quoted above ' + 'in {n_words} words?\n\n"""\n{text_chunk}\n"""') + """Prefix to the engineered prompt. The format args `text_chunk` and + `n_words` will be formatted by the Summary class at runtime. `text_chunk` + will be provided by the Summary text chunks, `n_words` is an initialization + argument for the Summary class.""" + + def __init__(self, text, model=None, n_words=500, **chunk_kwargs): + """ + Parameters + ---------- + text : str | list + Single body of text to chunk up using elm.Chunker or a pre-chunked + list of strings. Works well if this is a single document with empty + lines between paragraphs. + model : str + GPT model name, default is the DEFAULT_MODEL global var + n_words : int + Desired length of the output text. Note that this is never perfect + but helps guide the LLM to an approximate desired output length. + 400-600 words seems to work quite well with GPT-4. This gets + formatted into the MODEL_INSTRUCTION attribute. + chunk_kwargs : dict | None + kwargs for initialization of :class:`elm.chunk.Chunker` + """ + + super().__init__(model) + + self.text = text + self.n_words = n_words + + assert isinstance(self.text, (str, list, tuple)) + + if isinstance(self.text, str): + if os.path.isfile(text): + logger.info('Loading text file: {}'.format(text)) + with open(text, 'r') as f: + self.text = f.read() + self.text_chunks = Chunker(self.text, **chunk_kwargs) + else: + self.text_chunks = self.text + + self.summary_chunks = [] + +
+[docs] + def combine(self, text_summary): + """Combine separate chunk summaries into one more comprehensive + narrative + + Parameters + ---------- + summary : str + Summary of text. May be several disjointed paragraphs + + Returns + ------- + summary : str + Summary of text. Paragraphs will be more cohesive. + """ + role = 'You provide editorial services for technical writing.' + query = ('Can you combine the following paragraphs and ' + 'ease the transitions between them? ' + f'\n\n"""{text_summary}"""') + text_summary = self.generic_query(query, model_role=role) + return text_summary
+ + +
+[docs] + def run(self, temperature=0, fancy_combine=True): + """Use GPT to do a summary of input text. + + Parameters + ---------- + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + fancy_combine : bool + Flag to use the GPT model to combine the separate outputs into a + cohesive summary. + + Returns + ------- + summary : str + Summary of text. + """ + + logger.info('Summarizing {} text chunks in serial...' + .format(len(self.text_chunks))) + summary = '' + + for i, chunk in enumerate(self.text_chunks): + logger.debug('Summarizing text chunk {} out of {}' + .format(i + 1, len(self.text_chunks))) + + msg = self.MODEL_INSTRUCTION.format(text_chunk=chunk, + n_words=self.n_words) + response = self.generic_query(msg, model_role=self.MODEL_ROLE, + temperature=temperature) + self.summary_chunks.append(response) + summary += f'\n\n{response}' + + if fancy_combine: + summary = self.combine(summary) + + logger.info('Finished all summaries.') + + return summary
+ + +
+[docs] + async def run_async(self, temperature=0, ignore_error=None, + rate_limit=40e3, fancy_combine=True): + """Run text summary asynchronously for all text chunks + + NOTE: you need to call this using the await command in ipython or + jupyter, e.g.: `out = await Summary.run_async()` + + Parameters + ---------- + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + ignore_error : None | callable + Optional callable to parse API error string. If the callable + returns True, the error will be ignored, the API call will not be + tried again, and the output will be an empty string. + rate_limit : float + OpenAI API rate limit (tokens / minute). Note that the + gpt-3.5-turbo limit is 90k as of 4/2023, but we're using a large + factor of safety (~1/2) because we can only count the tokens on the + input side and assume the output is about the same count. + fancy_combine : bool + Flag to use the GPT model to combine the separate outputs into a + cohesive summary. + + Returns + ------- + summary : str + Summary of text. + """ + + logger.info('Summarizing {} text chunks asynchronously...' + .format(len(self.text_chunks))) + + queries = [] + for chunk in self.text_chunks: + msg = self.MODEL_INSTRUCTION.format(text_chunk=chunk, + n_words=self.n_words) + queries.append(msg) + + summaries = await self.generic_async_query(queries, + model_role=self.MODEL_ROLE, + temperature=temperature, + ignore_error=ignore_error, + rate_limit=rate_limit) + + self.summary_chunks = summaries + summary = '\n\n'.join(summaries) + + if fancy_combine: + summary = self.combine(summary) + + logger.info('Finished all summaries.') + + return summary
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/tree.html b/_modules/elm/tree.html new file mode 100644 index 00000000..408abc58 --- /dev/null +++ b/_modules/elm/tree.html @@ -0,0 +1,389 @@ + + + + + + elm.tree — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.tree

+# -*- coding: utf-8 -*-
+"""
+ELM decision trees.
+"""
+import networkx as nx
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+
+[docs] +class DecisionTree: + """Class to traverse a directed graph of LLM prompts. Nodes are + prompts and edges are transitions between prompts based on conditions + being met in the LLM response.""" + + def __init__(self, graph): + """Class to traverse a directed graph of LLM prompts. Nodes are + prompts and edges are transitions between prompts based on conditions + being met in the LLM response. + + Examples + -------- + Here's a simple example to setup a decision tree graph and run with the + DecisionTree class: + + >>> import logging + >>> import networkx as nx + >>> from rex import init_logger + >>> from elm.base import ApiBase + >>> from elm.tree import DecisionTree + >>> + >>> init_logger('elm.tree') + >>> + >>> G = nx.DiGraph(text='hello', name='Grant', + api=ApiBase(model='gpt-35-turbo')) + >>> + >>> G.add_node('init', prompt='Say {text} to {name}') + >>> G.add_edge('init', 'next', condition=lambda x: 'Grant' in x) + >>> G.add_node('next', prompt='How are you?') + >>> + >>> tree = DecisionTree(G) + >>> out = tree.run() + >>> + >>> print(tree.all_messages_txt) + + Parameters + ---------- + graph : nx.DiGraph + Directed acyclic graph where nodes are LLM prompts and edges are + logical transitions based on the response. Must have high-level + graph attribute "api" which is an ApiBase instance. Nodes should + have attribute "prompt" which can have {format} named arguments + that will be filled from the high-level graph attributes. Edges can + have attribute "condition" that is a callable to be executed on the + LLM response text. An edge from a node without a condition acts as + an "else" statement if no other edge conditions are satisfied. A + single edge from node to node does not need a condition. + """ + self._g = graph + self._history = [] + assert isinstance(self.graph, nx.DiGraph) + assert 'api' in self.graph.graph + + @property + def api(self): + """Get the ApiBase object. 
+ + Returns + ------- + ApiBase + """ + return self.graph.graph['api'] + + @property + def messages(self): + """Get a list of the conversation messages with the LLM. + + Returns + ------- + list + """ + return self.api.messages + + @property + def all_messages_txt(self): + """Get a printout of the full conversation with the LLM + + Returns + ------- + str + """ + return self.api.all_messages_txt + + @property + def history(self): + """Get a record of the nodes traversed in the tree + + Returns + ------- + list + """ + return self._history + + @property + def graph(self): + """Get the networkx graph object + + Returns + ------- + nx.DiGraph + """ + return self._g + +
+[docs] + def call_node(self, node0): + """Call the LLM with the prompt from the input node and search the + successor edges for a valid transition condition + + Parameters + ---------- + node0 : str + Name of node being executed. + + Returns + ------- + out : str + Next node or LLM response if at a leaf node. + """ + + prompt = self.graph.nodes[node0]['prompt'] + txt_fmt = {k: v for k, v in self.graph.graph.items() if k != 'api'} + prompt = prompt.format(**txt_fmt) + + self._history.append(node0) + out = self.api.chat(prompt) + + successors = list(self.graph.successors(node0)) + edges = [self.graph.edges[(node0, node1)] for node1 in successors] + conditions = [edge.get('condition', None) for edge in edges] + + if len(successors) == 0: + logger.info(f'Reached leaf node "{node0}".') + return out + + if len(successors) > 1 and all(c is None for c in conditions): + msg = (f'At least one of the edges from "{node0}" should have ' + f'a "condition": {edges}') + logger.error(msg) + raise AttributeError(msg) + + # prioritize callable conditions + for i, condition in enumerate(conditions): + if callable(condition) and condition(out): + logger.info(f'Node transition: "{node0}" -> "{successors[i]}" ' + '(satisfied by callable condition)') + return successors[i] + + # None condition is basically "else" statement + for i, condition in enumerate(conditions): + if condition is None: + logger.info(f'Node transition: "{node0}" -> "{successors[i]}" ' + '(satisfied by None condition)') + return successors[i] + + msg = (f'None of the edge conditions from "{node0}" ' + f'were satisfied: {edges}') + logger.error(msg) + raise AttributeError(msg)
+ + +
+[docs] + def run(self, node0='init'): + """Traverse the decision tree starting at the input node. + + Parameters + ---------- + node0 : str + Name of starting node in the graph. This is typically called "init" + + Returns + ------- + out : str + Final response from LLM at the leaf node. + """ + + self._history = [] + + while True: + try: + out = self.call_node(node0) + except Exception as e: + last_message = self.messages[-1]['content'] + msg = ('Ran into an exception when traversing tree. ' + 'Last message from LLM is printed below. ' + 'See debug logs for more detail. ' + '\nLast message: \n' + f'"""\n{last_message}\n"""') + logger.debug('Error traversing trees, heres the full ' + 'conversation printout:' + f'\n{self.all_messages_txt}') + logger.error(msg) + raise RuntimeError(msg) from e + if out in self.graph: + node0 = out + else: + break + + logger.info(f'Output: {out}') + + return out
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/elm/wizard.html b/_modules/elm/wizard.html new file mode 100644 index 00000000..b568ef45 --- /dev/null +++ b/_modules/elm/wizard.html @@ -0,0 +1,505 @@ + + + + + + elm.wizard — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for elm.wizard

+# -*- coding: utf-8 -*-
+"""
+ELM energy wizard
+"""
+import copy
+import numpy as np
+import openai
+
+from elm.base import ApiBase
+
+
+
+[docs] +class EnergyWizard(ApiBase): + """Interface to ask OpenAI LLMs about energy research.""" + + MODEL_ROLE = "You parse through articles to answer questions." + """High level model role, somewhat redundant to MODEL_INSTRUCTION""" + + MODEL_INSTRUCTION = ('Use the information below to answer the subsequent ' + 'question. If the answer cannot be found in the ' + 'text, write "I could not find an answer."') + """Prefix to the engineered prompt""" + + def __init__(self, corpus, model=None, token_budget=3500, ref_col=None): + """ + Parameters + ---------- + corpus : pd.DataFrame + Corpus of text in dataframe format. Must have columns "text" and + "embedding". + model : str + GPT model name, default is the DEFAULT_MODEL global var + token_budget : int + Number of tokens that can be embedded in the prompt. Note that the + default budget for GPT-3.5-Turbo is 4096, but you want to subtract + some tokens to account for the response budget. + ref_col : None | str + Optional column label in the corpus that provides a reference text + string for each chunk of text. + """ + + super().__init__(model) + self.corpus = self.preflight_corpus(corpus) + self.token_budget = token_budget + self.embedding_arr = np.vstack(self.corpus['embedding'].values) + self.text_arr = self.corpus['text'].values + self.ref_col = ref_col + +
+[docs] + @staticmethod + def preflight_corpus(corpus, required=('text', 'embedding')): + """Run preflight checks on the text corpus. + + Parameters + ---------- + corpus : pd.DataFrame + Corpus of text in dataframe format. Must have columns "text" and + "embedding". + required : list | tuple + Column names required to be in the corpus df + + Returns + ------- + corpus : pd.DataFrame + Corpus of text in dataframe format. Must have columns "text" and + "embedding". + """ + missing = [col for col in required if col not in corpus] + if any(missing): + msg = ('Text corpus must have {} columns but received ' + 'corpus with columns: {}' + .format(missing, list(corpus.columns))) + raise KeyError(msg) + return corpus
+ + +
+[docs] + def cosine_dist(self, query_embedding): + """Compute the cosine distance of the query embedding array vs. all of + the embedding arrays of the full text corpus + + Parameters + ---------- + query_embedding : np.ndarray + 1D array of the numerical embedding of the request query. + + Returns + ------- + out : np.ndarray + 1D array with length equal to the number of entries in the text + corpus. Each value is a distance score where smaller is closer + """ + + dot = np.dot(self.embedding_arr, query_embedding) + norm1 = np.linalg.norm(query_embedding) + norm2 = np.linalg.norm(self.embedding_arr, axis=1) + + out = 1 - (dot / (norm1 * norm2)) + + return out
+ + +
+[docs] + def rank_strings(self, query, top_n=100): + """Returns a list of strings and relatednesses, sorted from most + related to least. + + Parameters + ---------- + query : str + Question being asked of GPT + top_n : int + Number of top results to return. + + Returns + ------- + strings : np.ndarray + 1D array of related strings + score : np.ndarray + 1D array of float scores of strings + idx : np.ndarray + 1D array of indices in the text corpus corresponding to the + ranked strings/scores outputs. + """ + + embedding = self.get_embedding(query) + scores = 1 - self.cosine_dist(embedding) + best = np.argsort(scores)[::-1][:top_n] + + strings = self.text_arr[best] + scores = scores[best] + + return strings, scores, best
+ + +
+[docs] + def engineer_query(self, query, token_budget=None, new_info_threshold=0.7, + convo=False): + """Engineer a query for GPT using the corpus of information + + Parameters + ---------- + query : str + Question being asked of GPT + token_budget : int + Option to override the class init token budget. + new_info_threshold : float + New text added to the engineered query must contain at least this + much new information. This helps prevent (for example) the table of + contents being added multiple times. + convo : bool + Flag to perform semantic search with full conversation history + (True) or just the single query (False). Call EnergyWizard.clear() + to reset the chat history. + Returns + ------- + message : str + Engineered question to GPT including information from corpus and + the original query + references : list + The list of references (strs) used in the engineered prompt is + returned here + """ + + self.messages.append({"role": "user", "content": query}) + + if convo: + # [1:] to not include the system role in the semantic search + query = [f"{msg['role'].upper()}: {msg['content']}" + for msg in self.messages[1:]] + query = '\n\n'.join(query) + + token_budget = token_budget or self.token_budget + + strings, _, idx = self.rank_strings(query) + + message = copy.deepcopy(self.MODEL_INSTRUCTION) + question = f"\n\nQuestion: {query}" + used_index = [] + + for string, i in zip(strings, idx): + next_str = (f'\n\n"""\n{string}\n"""') + token_usage = self.count_tokens(message + next_str + question, + self.model) + + new_words = set(next_str.split(' ')) + additional_info = new_words - set(message.split(' ')) + new_info_frac = len(additional_info) / len(new_words) + + if new_info_frac > new_info_threshold: + if token_usage > token_budget: + break + else: + message += next_str + used_index.append(i) + + message = message + question + used_index = np.array(used_index) + references = self.make_ref_list(used_index) + + return message, references
+ + +
+[docs] + def make_ref_list(self, idx): + """Make a reference list + + Parameters + ---------- + used_index : np.ndarray + Indices of the used text from the text corpus + + Returns + ------- + ref_list : list + A list of references (strs) used. + """ + ref_list = '' + if self.ref_col is not None and self.ref_col in self.corpus: + ref_list = list(self.corpus[self.ref_col].iloc[idx].unique()) + + return ref_list
+ + +
+[docs] + def chat(self, query, + debug=True, + stream=True, + temperature=0, + convo=False, + token_budget=None, + new_info_threshold=0.7, + print_references=False, + return_chat_obj=False): + """Answers a query by doing a semantic search of relevant text with + embeddings and then sending engineered query to the LLM. + + Parameters + ---------- + query : str + Question being asked of EnergyWizard + debug : bool + Flag to return extra diagnostics on the engineered question. + stream : bool + Flag to print subsequent chunks of the response in a streaming + fashion + temperature : float + GPT model temperature, a measure of response entropy from 0 to 1. 0 + is more reliable and nearly deterministic; 1 will give the model + more creative freedom and may not return as factual of results. + convo : bool + Flag to perform semantic search with full conversation history + (True) or just the single query (False). Call EnergyWizard.clear() + to reset the chat history. + token_budget : int + Option to override the class init token budget. + new_info_threshold : float + New text added to the engineered query must contain at least this + much new information. This helps prevent (for example) the table of + contents being added multiple times. + print_references : bool + Flag to print references if EnergyWizard is initialized with a + valid ref_col. + return_chat_obj : bool + Flag to only return the ChatCompletion from OpenAI API. + + Returns + ------- + response : str + GPT output / answer. 
+ query : str + If debug is True, the engineered query asked of GPT will also be + returned here + references : list + If debug is True, the list of references (strs) used in the + engineered prompt is returned here + """ + + out = self.engineer_query(query, token_budget=token_budget, + new_info_threshold=new_info_threshold, + convo=convo) + query, references = out + + messages = [{"role": "system", "content": self.MODEL_ROLE}, + {"role": "user", "content": query}] + response_message = '' + kwargs = dict(model=self.model, + messages=messages, + temperature=temperature, + stream=stream) + + if 'azure' in str(openai.api_type).lower(): + kwargs['engine'] = self.model + + response = openai.ChatCompletion.create(**kwargs) + + if return_chat_obj: + return response, query, references + + if stream: + for chunk in response: + chunk_msg = chunk['choices'][0]['delta'] + chunk_msg = chunk_msg.get('content', '') + response_message += chunk_msg + print(chunk_msg, end='') + + else: + response_message = response["choices"][0]["message"]["content"] + + self.messages.append({'role': 'assistant', + 'content': response_message}) + + if any(references) and print_references: + ref_msg = ('\n\nThe model was provided with the ' + 'following documents to support its answer:') + ref_msg += '\n - ' + '\n - '.join(references) + response_message += ref_msg + if stream: + print(ref_msg) + + if debug: + return response_message, query, references + else: + return response_message
+
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/index.html b/_modules/index.html new file mode 100644 index 00000000..a94e3357 --- /dev/null +++ b/_modules/index.html @@ -0,0 +1,179 @@ + + + + + + Overview: module code — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ +

All modules for which code is available

+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.base.ApiBase.rst.txt b/_sources/_autosummary/elm.base.ApiBase.rst.txt new file mode 100644 index 00000000..0451a30a --- /dev/null +++ b/_sources/_autosummary/elm.base.ApiBase.rst.txt @@ -0,0 +1,43 @@ +elm.base.ApiBase +================ + +.. currentmodule:: elm.base + +.. autoclass:: ApiBase + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~ApiBase.call_api + ~ApiBase.call_api_async + ~ApiBase.chat + ~ApiBase.clear + ~ApiBase.count_tokens + ~ApiBase.generic_async_query + ~ApiBase.generic_query + ~ApiBase.get_embedding + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ApiBase.DEFAULT_MODEL + ~ApiBase.EMBEDDING_MODEL + ~ApiBase.EMBEDDING_URL + ~ApiBase.HEADERS + ~ApiBase.MODEL_ROLE + ~ApiBase.URL + ~ApiBase.all_messages_txt + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.base.ApiQueue.rst.txt b/_sources/_autosummary/elm.base.ApiQueue.rst.txt new file mode 100644 index 00000000..b93b6594 --- /dev/null +++ b/_sources/_autosummary/elm.base.ApiQueue.rst.txt @@ -0,0 +1,32 @@ +elm.base.ApiQueue +================= + +.. currentmodule:: elm.base + +.. autoclass:: ApiQueue + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~ApiQueue.collect_jobs + ~ApiQueue.run + ~ApiQueue.submit_jobs + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ApiQueue.waiting_on + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.base.rst.txt b/_sources/_autosummary/elm.base.rst.txt new file mode 100644 index 00000000..87aaf975 --- /dev/null +++ b/_sources/_autosummary/elm.base.rst.txt @@ -0,0 +1,32 @@ +elm.base +======== + +.. automodule:: elm.base + + + + + + + + + + + + .. rubric:: Classes + + .. 
autosummary:: + :toctree: + :template: custom-class-template.rst + + ApiBase + ApiQueue + + + + + + + + + diff --git a/_sources/_autosummary/elm.chunk.Chunker.rst.txt b/_sources/_autosummary/elm.chunk.Chunker.rst.txt new file mode 100644 index 00000000..5e06ef21 --- /dev/null +++ b/_sources/_autosummary/elm.chunk.Chunker.rst.txt @@ -0,0 +1,52 @@ +elm.chunk.Chunker +================= + +.. currentmodule:: elm.chunk + +.. autoclass:: Chunker + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Chunker.add_overlap + ~Chunker.call_api + ~Chunker.call_api_async + ~Chunker.chat + ~Chunker.chunk_text + ~Chunker.clean_paragraphs + ~Chunker.clear + ~Chunker.count_tokens + ~Chunker.generic_async_query + ~Chunker.generic_query + ~Chunker.get_embedding + ~Chunker.is_good_paragraph + ~Chunker.merge_chunks + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Chunker.DEFAULT_MODEL + ~Chunker.EMBEDDING_MODEL + ~Chunker.EMBEDDING_URL + ~Chunker.HEADERS + ~Chunker.MODEL_ROLE + ~Chunker.URL + ~Chunker.all_messages_txt + ~Chunker.chunk_tokens + ~Chunker.chunks + ~Chunker.paragraph_tokens + ~Chunker.paragraphs + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.chunk.rst.txt b/_sources/_autosummary/elm.chunk.rst.txt new file mode 100644 index 00000000..446f930b --- /dev/null +++ b/_sources/_autosummary/elm.chunk.rst.txt @@ -0,0 +1,31 @@ +elm.chunk +========= + +.. automodule:: elm.chunk + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + Chunker + + + + + + + + + diff --git a/_sources/_autosummary/elm.embed.ChunkAndEmbed.rst.txt b/_sources/_autosummary/elm.embed.ChunkAndEmbed.rst.txt new file mode 100644 index 00000000..dafa5a73 --- /dev/null +++ b/_sources/_autosummary/elm.embed.ChunkAndEmbed.rst.txt @@ -0,0 +1,46 @@ +elm.embed.ChunkAndEmbed +======================= + +.. 
currentmodule:: elm.embed + +.. autoclass:: ChunkAndEmbed + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~ChunkAndEmbed.call_api + ~ChunkAndEmbed.call_api_async + ~ChunkAndEmbed.chat + ~ChunkAndEmbed.clean_tables + ~ChunkAndEmbed.clear + ~ChunkAndEmbed.count_tokens + ~ChunkAndEmbed.generic_async_query + ~ChunkAndEmbed.generic_query + ~ChunkAndEmbed.get_embedding + ~ChunkAndEmbed.run + ~ChunkAndEmbed.run_async + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~ChunkAndEmbed.DEFAULT_MODEL + ~ChunkAndEmbed.EMBEDDING_MODEL + ~ChunkAndEmbed.EMBEDDING_URL + ~ChunkAndEmbed.HEADERS + ~ChunkAndEmbed.MODEL_ROLE + ~ChunkAndEmbed.URL + ~ChunkAndEmbed.all_messages_txt + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.embed.rst.txt b/_sources/_autosummary/elm.embed.rst.txt new file mode 100644 index 00000000..64866b78 --- /dev/null +++ b/_sources/_autosummary/elm.embed.rst.txt @@ -0,0 +1,31 @@ +elm.embed +========= + +.. automodule:: elm.embed + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + ChunkAndEmbed + + + + + + + + + diff --git a/_sources/_autosummary/elm.osti.OstiList.rst.txt b/_sources/_autosummary/elm.osti.OstiList.rst.txt new file mode 100644 index 00000000..b5471bf7 --- /dev/null +++ b/_sources/_autosummary/elm.osti.OstiList.rst.txt @@ -0,0 +1,43 @@ +elm.osti.OstiList +================= + +.. currentmodule:: elm.osti + +.. autoclass:: OstiList + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~OstiList.append + ~OstiList.clear + ~OstiList.copy + ~OstiList.count + ~OstiList.download + ~OstiList.extend + ~OstiList.from_osti_ids + ~OstiList.index + ~OstiList.insert + ~OstiList.pop + ~OstiList.remove + ~OstiList.reverse + ~OstiList.sort + + + + + + .. 
rubric:: Attributes + + .. autosummary:: + + ~OstiList.BASE_URL + ~OstiList.meta + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.osti.OstiRecord.rst.txt b/_sources/_autosummary/elm.osti.OstiRecord.rst.txt new file mode 100644 index 00000000..89f3bf54 --- /dev/null +++ b/_sources/_autosummary/elm.osti.OstiRecord.rst.txt @@ -0,0 +1,48 @@ +elm.osti.OstiRecord +=================== + +.. currentmodule:: elm.osti + +.. autoclass:: OstiRecord + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~OstiRecord.clear + ~OstiRecord.copy + ~OstiRecord.download + ~OstiRecord.fromkeys + ~OstiRecord.get + ~OstiRecord.items + ~OstiRecord.keys + ~OstiRecord.pop + ~OstiRecord.popitem + ~OstiRecord.setdefault + ~OstiRecord.strip_nested_brackets + ~OstiRecord.update + ~OstiRecord.values + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~OstiRecord.authors + ~OstiRecord.date + ~OstiRecord.doi + ~OstiRecord.osti_id + ~OstiRecord.title + ~OstiRecord.url + ~OstiRecord.year + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.osti.rst.txt b/_sources/_autosummary/elm.osti.rst.txt new file mode 100644 index 00000000..0f490ba7 --- /dev/null +++ b/_sources/_autosummary/elm.osti.rst.txt @@ -0,0 +1,32 @@ +elm.osti +======== + +.. automodule:: elm.osti + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + OstiList + OstiRecord + + + + + + + + + diff --git a/_sources/_autosummary/elm.pdf.PDFtoTXT.rst.txt b/_sources/_autosummary/elm.pdf.PDFtoTXT.rst.txt new file mode 100644 index 00000000..e4da2c76 --- /dev/null +++ b/_sources/_autosummary/elm.pdf.PDFtoTXT.rst.txt @@ -0,0 +1,53 @@ +elm.pdf.PDFtoTXT +================ + +.. currentmodule:: elm.pdf + +.. autoclass:: PDFtoTXT + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. 
rubric:: Methods + + .. autosummary:: + + ~PDFtoTXT.call_api + ~PDFtoTXT.call_api_async + ~PDFtoTXT.chat + ~PDFtoTXT.clean_headers + ~PDFtoTXT.clean_poppler + ~PDFtoTXT.clean_txt + ~PDFtoTXT.clean_txt_async + ~PDFtoTXT.clear + ~PDFtoTXT.combine_pages + ~PDFtoTXT.count_tokens + ~PDFtoTXT.generic_async_query + ~PDFtoTXT.generic_query + ~PDFtoTXT.get_embedding + ~PDFtoTXT.is_double_col + ~PDFtoTXT.load_pdf + ~PDFtoTXT.make_gpt_messages + ~PDFtoTXT.validate_clean + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~PDFtoTXT.DEFAULT_MODEL + ~PDFtoTXT.EMBEDDING_MODEL + ~PDFtoTXT.EMBEDDING_URL + ~PDFtoTXT.HEADERS + ~PDFtoTXT.MODEL_INSTRUCTION + ~PDFtoTXT.MODEL_ROLE + ~PDFtoTXT.URL + ~PDFtoTXT.all_messages_txt + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.pdf.rst.txt b/_sources/_autosummary/elm.pdf.rst.txt new file mode 100644 index 00000000..892a60a3 --- /dev/null +++ b/_sources/_autosummary/elm.pdf.rst.txt @@ -0,0 +1,31 @@ +elm.pdf +======= + +.. automodule:: elm.pdf + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + PDFtoTXT + + + + + + + + + diff --git a/_sources/_autosummary/elm.rst.txt b/_sources/_autosummary/elm.rst.txt new file mode 100644 index 00000000..e3cc2cfa --- /dev/null +++ b/_sources/_autosummary/elm.rst.txt @@ -0,0 +1,38 @@ +elm +=== + +.. automodule:: elm + + + + + + + + + + + + + + + + + + + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + + elm.base + elm.chunk + elm.embed + elm.osti + elm.pdf + elm.summary + elm.tree + elm.version + elm.wizard + diff --git a/_sources/_autosummary/elm.summary.Summary.rst.txt b/_sources/_autosummary/elm.summary.Summary.rst.txt new file mode 100644 index 00000000..9ac3d432 --- /dev/null +++ b/_sources/_autosummary/elm.summary.Summary.rst.txt @@ -0,0 +1,47 @@ +elm.summary.Summary +=================== + +.. currentmodule:: elm.summary + +.. 
autoclass:: Summary + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~Summary.call_api + ~Summary.call_api_async + ~Summary.chat + ~Summary.clear + ~Summary.combine + ~Summary.count_tokens + ~Summary.generic_async_query + ~Summary.generic_query + ~Summary.get_embedding + ~Summary.run + ~Summary.run_async + + + + + + .. rubric:: Attributes + + .. autosummary:: + + ~Summary.DEFAULT_MODEL + ~Summary.EMBEDDING_MODEL + ~Summary.EMBEDDING_URL + ~Summary.HEADERS + ~Summary.MODEL_INSTRUCTION + ~Summary.MODEL_ROLE + ~Summary.URL + ~Summary.all_messages_txt + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.summary.rst.txt b/_sources/_autosummary/elm.summary.rst.txt new file mode 100644 index 00000000..9b7ba323 --- /dev/null +++ b/_sources/_autosummary/elm.summary.rst.txt @@ -0,0 +1,31 @@ +elm.summary +=========== + +.. automodule:: elm.summary + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + Summary + + + + + + + + + diff --git a/_sources/_autosummary/elm.tree.DecisionTree.rst.txt b/_sources/_autosummary/elm.tree.DecisionTree.rst.txt new file mode 100644 index 00000000..3d88e8d7 --- /dev/null +++ b/_sources/_autosummary/elm.tree.DecisionTree.rst.txt @@ -0,0 +1,35 @@ +elm.tree.DecisionTree +===================== + +.. currentmodule:: elm.tree + +.. autoclass:: DecisionTree + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~DecisionTree.call_node + ~DecisionTree.run + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~DecisionTree.all_messages_txt + ~DecisionTree.api + ~DecisionTree.graph + ~DecisionTree.history + ~DecisionTree.messages + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.tree.rst.txt b/_sources/_autosummary/elm.tree.rst.txt new file mode 100644 index 00000000..4b6a4abf --- /dev/null +++ b/_sources/_autosummary/elm.tree.rst.txt @@ -0,0 +1,31 @@ +elm.tree +======== + +.. automodule:: elm.tree + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + DecisionTree + + + + + + + + + diff --git a/_sources/_autosummary/elm.version.rst.txt b/_sources/_autosummary/elm.version.rst.txt new file mode 100644 index 00000000..dbddecd2 --- /dev/null +++ b/_sources/_autosummary/elm.version.rst.txt @@ -0,0 +1,23 @@ +elm.version +=========== + +.. automodule:: elm.version + + + + + + + + + + + + + + + + + + + diff --git a/_sources/_autosummary/elm.wizard.EnergyWizard.rst.txt b/_sources/_autosummary/elm.wizard.EnergyWizard.rst.txt new file mode 100644 index 00000000..9c341433 --- /dev/null +++ b/_sources/_autosummary/elm.wizard.EnergyWizard.rst.txt @@ -0,0 +1,49 @@ +elm.wizard.EnergyWizard +======================= + +.. currentmodule:: elm.wizard + +.. autoclass:: EnergyWizard + :members: + :show-inheritance: + :inherited-members: + :special-members: __call__, __add__, __mul__ + + + + .. rubric:: Methods + + .. autosummary:: + + ~EnergyWizard.call_api + ~EnergyWizard.call_api_async + ~EnergyWizard.chat + ~EnergyWizard.clear + ~EnergyWizard.cosine_dist + ~EnergyWizard.count_tokens + ~EnergyWizard.engineer_query + ~EnergyWizard.generic_async_query + ~EnergyWizard.generic_query + ~EnergyWizard.get_embedding + ~EnergyWizard.make_ref_list + ~EnergyWizard.preflight_corpus + ~EnergyWizard.rank_strings + + + + + + .. rubric:: Attributes + + .. 
autosummary:: + + ~EnergyWizard.DEFAULT_MODEL + ~EnergyWizard.EMBEDDING_MODEL + ~EnergyWizard.EMBEDDING_URL + ~EnergyWizard.HEADERS + ~EnergyWizard.MODEL_INSTRUCTION + ~EnergyWizard.MODEL_ROLE + ~EnergyWizard.URL + ~EnergyWizard.all_messages_txt + + \ No newline at end of file diff --git a/_sources/_autosummary/elm.wizard.rst.txt b/_sources/_autosummary/elm.wizard.rst.txt new file mode 100644 index 00000000..020f7693 --- /dev/null +++ b/_sources/_autosummary/elm.wizard.rst.txt @@ -0,0 +1,31 @@ +elm.wizard +========== + +.. automodule:: elm.wizard + + + + + + + + + + + + .. rubric:: Classes + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + + EnergyWizard + + + + + + + + + diff --git a/_sources/api.rst.txt b/_sources/api.rst.txt new file mode 100644 index 00000000..14895b3d --- /dev/null +++ b/_sources/api.rst.txt @@ -0,0 +1,6 @@ +.. autosummary:: + :toctree: _autosummary + :template: custom-module-template.rst + :recursive: + + elm diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 00000000..a7945361 --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,8 @@ +.. toctree:: + :hidden: + + Home page + Installation + API reference <_autosummary/elm> + +.. include:: ../../README.rst diff --git a/_sources/installation.rst.txt b/_sources/installation.rst.txt new file mode 100644 index 00000000..6b45526f --- /dev/null +++ b/_sources/installation.rst.txt @@ -0,0 +1,6 @@ +Installation +============ + +.. include:: ../../README.rst + :start-after: install + :end-before: acknowledgements diff --git a/_static/_sphinx_javascript_frameworks_compat.js b/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. 
+ * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. 
+ */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 00000000..30fee9d0 --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 
10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + 
margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars 
-------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th 
{ + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; 
+} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig 
dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + 
overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/css/badge_only.css b/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ 
b/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff b/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff2 b/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff b/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff 
differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff2 b/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/_static/css/fonts/fontawesome-webfont.eot b/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/_static/css/fonts/fontawesome-webfont.svg b/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/_static/css/fonts/fontawesome-webfont.ttf b/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/_static/css/fonts/fontawesome-webfont.woff b/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/_static/css/fonts/fontawesome-webfont.woff2 b/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/_static/css/fonts/lato-bold-italic.woff b/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff differ diff --git a/_static/css/fonts/lato-bold-italic.woff2 b/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/_static/css/fonts/lato-bold.woff b/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/_static/css/fonts/lato-bold.woff differ diff --git a/_static/css/fonts/lato-bold.woff2 b/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/_static/css/fonts/lato-bold.woff2 differ diff --git a/_static/css/fonts/lato-normal-italic.woff b/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff differ diff --git 
a/_static/css/fonts/lato-normal-italic.woff2 b/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/_static/css/fonts/lato-normal.woff b/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/_static/css/fonts/lato-normal.woff differ diff --git a/_static/css/fonts/lato-normal.woff2 b/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/_static/css/fonts/lato-normal.woff2 differ diff --git a/_static/css/theme.css b/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier 
new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir 
a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content 
h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a 
button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content
:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before
{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magi
c:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-l
aptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:
""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-a
pple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-squa
re:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{conten
t:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-m
ars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons
:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{conten
t:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:befo
re{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption 
.headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a 
.rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn 
.headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav 
.fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn 
span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content .fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download 
.btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content 
.btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini 
.headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint 
.admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint 
.wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content 
.wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso 
.admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso 
.wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container 
li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 
.3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group .wy-form-halves input[type=url],.wy-control-group 
.wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input 
input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid 
#ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error 
input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) 
rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content 
table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) 
td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto 
Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content 
section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs 
li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover 
button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 
13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a 
img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 
3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions 
a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions 
.rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content 
.toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content 
pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso .last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section 
ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul 
li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 
.headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar 
ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content .footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto 
auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content 
dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote 
span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content .wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils 
td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) 
code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 
.rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content 
.sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/_static/custom.css b/_static/custom.css new file mode 100644 index 00000000..85fec2de --- /dev/null +++ b/_static/custom.css @@ -0,0 +1,14 @@ +.wy-nav-content { + max-width: 60% !important; +} + +.wy-side-nav-search { + display: block; + width: 300px; + padding: 0.809em; + margin-bottom: 0.809em; + z-index: 200; + background-color: #fcfcfc; + text-align: center; + color: 
#fcfcfc; +} diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + 
event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 00000000..d1f22919 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '0.0.1', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 00000000..a858a410 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/jquery.js b/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/_static/jquery.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new 
RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function 
fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return 
e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var 
t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," 
":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return 
e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 
1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var 
b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/html5shiv.min.js b/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return 
t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/theme.js b/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return 
n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var 
t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" 
+ V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = 
re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 00000000..d96755fd Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 00000000..7107cec9 Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000..0d49244e --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { 
color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* 
Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 00000000..7918c3fa --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => 
responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. 
+ */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = 
_("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! + if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if 
(SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? 
"#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + 
if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && !terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. 
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 00000000..8a96c69a --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + 
if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/api.html b/api.html new file mode 100644 index 00000000..0dbfd88b --- /dev/null +++ b/api.html @@ -0,0 +1,179 @@ + + + + + + + <no title> — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + + + + + + +

elm

Energy Language Model

+ + +
+
+
+ +
+ +
+

© Copyright 2023, Alliance for Sustainable Energy, LLC.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/genindex.html b/genindex.html new file mode 100644 index 00000000..b46f0ff6 --- /dev/null +++ b/genindex.html @@ -0,0 +1,862 @@ + + + + + + Index — elm 0.0.1 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +

Index

+ +
+ _ + | A + | B + | C + | D + | E + | F + | G + | H + | I + | K + | L + | M + | O + | P + | R + | S + | T + | U + | V + | W + | Y + +
+

_

+ + + +
+ +

A

+ + + +
+ +

B

+ + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + + +
    +
  • + elm + +
  • +
  • + elm.base + +
  • +
  • + elm.chunk + +
  • +
  • + elm.embed + +
  • +
  • + elm.osti + +
  • +
  • + elm.pdf + +
  • +
  • + elm.summary + +
  • +
  • + elm.tree + +
  • +
  • + elm.version + +
  • +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

H

+ + + +
+ +

I

+ + + +
+ +

K

+ + +
+ +

L

+ + +
+ +

M

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + +
+ +

U

+ + + +
+ +

V

+ + + +
+ +

W

+ + +
+ +

Y

+ + +
+ + + +
+
+
+ +
+ +
+

© Copyright 2023, Alliance for Sustainable Energy, LLC.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 00000000..bf6590fc --- /dev/null +++ b/index.html @@ -0,0 +1,222 @@ + + + + + + + Energy Language Model (ELM) — elm 0.0.1 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+
+
+

Energy Language Model (ELM)

+https://github.com/NREL/elm/workflows/Documentation/badge.svg +https://github.com/NREL/elm/workflows/pytests/badge.svg +https://github.com/NREL/elm/workflows/Lint%20Code%20Base/badge.svg +https://img.shields.io/pypi/pyversions/NREL-elm.svg +https://badge.fury.io/py/NREL-elm.svg +

The Energy Language Model (ELM) software provides interfaces to apply Large Language Models (LLMs) like ChatGPT and GPT-4 to energy research. For example, you might be interested in:

+ +
+

Installing ELM

+

Option #1 (basic usage):

+
    +
  1. pip install NREL-elm

  2. +
+

Option #2 (developer install):

+
    +
  1. from home dir, git clone git@github.com:NREL/elm.git

  2. +
  3. +
    Create elm environment and install package
      +
    1. Create a conda env: conda create -n elm

    2. +
    3. Run the command: conda activate elm

    4. +
    5. cd into the repo cloned in 1.

    6. +
    7. Prior to running pip below, make sure the branch is correct (install +from main!)

    8. +
    9. Install elm and its dependencies by running: +pip install . (or pip install -e . if running a dev branch +or working on the source code)

    10. +
    +
    +
    +
  4. +
+
+
+

Acknowledgments

+

This work was authored by the National Renewable Energy Laboratory, operated by Alliance for Sustainable Energy, LLC, for the U.S. Department of Energy (DOE) under Contract No. DE-AC36-08GO28308. Funding provided by the DOE Wind Energy Technologies Office (WETO), the DOE Solar Energy Technologies Office (SETO), and internal research funds at the National Renewable Energy Laboratory. The views expressed in the article do not necessarily represent the views of the DOE or the U.S. Government. The U.S. Government retains and the publisher, by accepting the article for publication, acknowledges that the U.S. Government retains a nonexclusive, paid-up, irrevocable, worldwide license to publish or reproduce the published form of this work, or allow others to do so, for U.S. Government purposes.

+
+
+ + +
+
+
+ +
+ +
+

© Copyright 2023, Alliance for Sustainable Energy, LLC.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/installation.html b/installation.html new file mode 100644 index 00000000..1ef22dd5 --- /dev/null +++ b/installation.html @@ -0,0 +1,202 @@ + + + + + + + Installation — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installation

+

Option #1 (basic usage):

+
    +
  1. pip install NREL-elm

  2. +
+

Option #2 (developer install):

+
    +
  1. from home dir, git clone git@github.com:NREL/elm.git

  2. +
  3. +
    Create elm environment and install package
      +
    1. Create a conda env: conda create -n elm

    2. +
    3. Run the command: conda activate elm

    4. +
    5. cd into the repo cloned in 1.

    6. +
    7. Prior to running pip below, make sure the branch is correct (install +from main!)

    8. +
    9. Install elm and its dependencies by running: +pip install . (or pip install -e . if running a dev branch +or working on the source code)

    10. +
    +
    +
    +
  4. +
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 00000000..5d4f9f4f Binary files /dev/null and b/objects.inv differ diff --git a/py-modindex.html b/py-modindex.html new file mode 100644 index 00000000..dad620e2 --- /dev/null +++ b/py-modindex.html @@ -0,0 +1,236 @@ + + + + + + Python Module Index — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Python Module Index

+ +
+ e +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 
+ e
+ elm +
    + elm.base +
    + elm.chunk +
    + elm.embed +
    + elm.osti +
    + elm.pdf +
    + elm.summary +
    + elm.tree +
    + elm.version +
    + elm.wizard +
+ + +
+
+
+ +
+ +
+

© Copyright 2023, Alliance for Sustainable Energy, LLC.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + \ No newline at end of file diff --git a/search.html b/search.html new file mode 100644 index 00000000..5bcaaab7 --- /dev/null +++ b/search.html @@ -0,0 +1,191 @@ + + + + + + Search — elm 0.0.1 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + + + +
+ +
+ +
+
+
+ +
+ +
+

© Copyright 2023, Alliance for Sustainable Energy, LLC.

+
+ + Built with Sphinx using a + theme + provided by Read the Docs. + + +
+
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/searchindex.js b/searchindex.js new file mode 100644 index 00000000..65ba9748 --- /dev/null +++ b/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["_autosummary/elm", "_autosummary/elm.base", "_autosummary/elm.base.ApiBase", "_autosummary/elm.base.ApiQueue", "_autosummary/elm.chunk", "_autosummary/elm.chunk.Chunker", "_autosummary/elm.embed", "_autosummary/elm.embed.ChunkAndEmbed", "_autosummary/elm.osti", "_autosummary/elm.osti.OstiList", "_autosummary/elm.osti.OstiRecord", "_autosummary/elm.pdf", "_autosummary/elm.pdf.PDFtoTXT", "_autosummary/elm.summary", "_autosummary/elm.summary.Summary", "_autosummary/elm.tree", "_autosummary/elm.tree.DecisionTree", "_autosummary/elm.version", "_autosummary/elm.wizard", "_autosummary/elm.wizard.EnergyWizard", "api", "index", "installation"], "filenames": ["_autosummary/elm.rst", "_autosummary/elm.base.rst", "_autosummary/elm.base.ApiBase.rst", "_autosummary/elm.base.ApiQueue.rst", "_autosummary/elm.chunk.rst", "_autosummary/elm.chunk.Chunker.rst", "_autosummary/elm.embed.rst", "_autosummary/elm.embed.ChunkAndEmbed.rst", "_autosummary/elm.osti.rst", "_autosummary/elm.osti.OstiList.rst", "_autosummary/elm.osti.OstiRecord.rst", "_autosummary/elm.pdf.rst", "_autosummary/elm.pdf.PDFtoTXT.rst", "_autosummary/elm.summary.rst", "_autosummary/elm.summary.Summary.rst", "_autosummary/elm.tree.rst", "_autosummary/elm.tree.DecisionTree.rst", "_autosummary/elm.version.rst", "_autosummary/elm.wizard.rst", "_autosummary/elm.wizard.EnergyWizard.rst", "api.rst", "index.rst", "installation.rst"], "titles": ["elm", "elm.base", "elm.base.ApiBase", "elm.base.ApiQueue", "elm.chunk", "elm.chunk.Chunker", "elm.embed", "elm.embed.ChunkAndEmbed", "elm.osti", "elm.osti.OstiList", "elm.osti.OstiRecord", "elm.pdf", "elm.pdf.PDFtoTXT", "elm.summary", "elm.summary.Summary", "elm.tree", "elm.tree.DecisionTree", "elm.version", "elm.wizard", "elm.wizard.EnergyWizard", "<no title>", "Energy 
Language Model (ELM)", "Installation"], "terms": {"energi": [0, 14, 18, 19], "languag": 0, "model": [0, 2, 3, 5, 7, 12, 14, 16, 19], "abstract": 1, "class": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19], "api": [1, 2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "call": [1, 2, 3, 5, 7, 12, 14, 16, 19], "none": [2, 3, 5, 7, 9, 10, 12, 14, 19], "sourc": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19, 21, 22], "abc": 2, "pars": [2, 3, 5, 7, 12, 14, 19], "text": [2, 4, 5, 6, 7, 10, 11, 12, 14, 16, 19, 21], "from": [2, 5, 7, 8, 9, 10, 12, 14, 16, 19, 21, 22], "pdf": [2, 5, 7, 9, 10, 14, 19, 21], "document": [2, 5, 12, 14, 21], "paramet": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "str": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "option": [2, 3, 5, 7, 12, 14, 19, 21, 22], "specif": [2, 5, 7, 12, 14, 19], "openai": [2, 3, 5, 7, 12, 14, 19], "us": [2, 3, 5, 7, 12, 14, 19, 21], "default": [2, 5, 7, 9, 10, 12, 14, 19], "i": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19, 21, 22], "cl": [2, 7, 12], "default_model": [2, 5, 7, 12, 14, 19], "method": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "attribut": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "gpt": [2, 3, 5, 7, 12, 14, 16, 19, 21], "3": [2, 3, 5, 7, 12, 14, 19], "5": [2, 3, 5, 7, 12, 14, 19], "turbo": [2, 3, 5, 7, 12, 14, 16, 19], "do": [2, 3, 5, 7, 12, 14, 19, 21], "clean": [2, 5, 7, 10, 12, 14, 19], "embedding_model": [2, 5, 7, 12, 14, 19], "embed": [2, 3, 5, 6, 7, 12, 14, 19, 21], "ada": [2, 5, 7, 12, 14, 19], "002": [2, 5, 7, 12, 14, 19], "embedding_url": [2, 5, 7, 12, 14, 19], "http": [2, 3, 5, 7, 9, 12, 14, 19], "com": [2, 3, 5, 7, 12, 14, 19, 21, 22], "v1": [2, 3, 5, 7, 9, 12, 14, 19], "url": [2, 3, 5, 7, 9, 10, 12, 14, 19], "chat": [2, 3, 5, 7, 12, 14, 19], "complet": [2, 3, 5, 7, 12, 14, 19], "header": [2, 3, 5, 7, 12, 14, 19], "author": [2, 3, 5, 7, 10, 12, 14, 19, 21], "bearer": [2, 3, 5, 7, 12, 14, 19], "content": [2, 3, 5, 7, 12, 14, 19], "type": [2, 3, 5, 7, 12, 14, 19], "applic": [2, 3, 5, 7, 12, 14, 19], "json": [2, 3, 5, 7, 12, 14, 19], "kei": [2, 
5, 7, 9, 10, 12, 14, 19], "model_rol": [2, 5, 7, 12, 14, 19], "you": [2, 3, 5, 7, 12, 14, 16, 19, 21], "ar": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "research": [2, 5, 7, 12, 13, 14, 19, 21], "assist": [2, 5, 7, 12, 14, 19], "answer": [2, 5, 7, 12, 14, 19], "question": [2, 5, 7, 12, 14, 19], "high": [2, 5, 7, 12, 14, 16, 19], "level": [2, 5, 7, 12, 14, 16, 19], "role": [2, 3, 5, 7, 12, 14, 19], "properti": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "all_messages_txt": [2, 5, 7, 12, 14, 16, 19], "get": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "string": [2, 3, 5, 7, 9, 12, 14, 19], "printout": [2, 5, 7, 12, 14, 16, 19], "full": [2, 5, 7, 12, 14, 16, 19], "convers": [2, 5, 7, 12, 14, 16, 19], "llm": [2, 5, 7, 12, 13, 14, 16, 19, 21], "return": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "clear": [2, 5, 7, 9, 10, 12, 14, 19], "histori": [2, 5, 7, 12, 14, 16, 19], "reduc": [2, 5, 7, 12, 14, 19], "messag": [2, 3, 5, 7, 12, 14, 16, 19], "just": [2, 5, 7, 12, 14, 19], "initi": [2, 5, 7, 9, 12, 14, 19], "async": [2, 3, 5, 7, 12, 14, 19], "static": [2, 5, 7, 10, 12, 14, 19], "call_api": [2, 5, 7, 12, 14, 19], "request_json": [2, 3, 5, 7, 12, 14, 19], "make": [2, 5, 7, 12, 14, 19, 21, 22], "an": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19, 21], "asyncron": [2, 3, 5, 7, 12, 14, 19], "typic": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "either": [2, 3, 5, 7, 10, 12, 14, 19], "dict": [2, 3, 5, 7, 10, 12, 14, 19], "f": [2, 3, 5, 7, 10, 12, 14, 19], "api_kei": [2, 3, 5, 7, 12, 14, 19], "data": [2, 3, 5, 7, 8, 10, 12, 14, 19, 21], "input": [2, 3, 5, 7, 9, 12, 14, 16, 19], "look": [2, 3, 5, 7, 12, 14, 19], "like": [2, 3, 5, 7, 10, 12, 14, 19, 21], "thi": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19, 21], "system": [2, 3, 5, 7, 12, 14, 19], "user": [2, 3, 5, 7, 12, 14, 19], "temperatur": [2, 3, 5, 7, 12, 14, 19], "0": [2, 3, 5, 7, 9, 12, 14, 19], "out": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "respons": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "format": [2, 5, 7, 12, 14, 16, 19], "call_api_async": [2, 5, 7, 12, 14, 19], 
"all_request_json": [2, 3, 5, 7, 12, 14, 19], "ignore_error": [2, 3, 5, 7, 12, 14, 19], "rate_limit": [2, 3, 5, 7, 12, 14, 19], "40000": [2, 3, 5, 7, 12, 14, 19], "raw": [2, 5, 7, 12, 14, 19], "parallel": [2, 3, 5, 7, 12, 14, 19], "note": [2, 3, 5, 7, 12, 14, 19], "need": [2, 5, 7, 12, 14, 16, 19], "await": [2, 5, 7, 12, 14, 19], "command": [2, 5, 7, 12, 14, 19, 21, 22], "ipython": [2, 5, 7, 12, 14, 19], "jupyt": [2, 5, 7, 12, 14, 19], "e": [2, 5, 7, 9, 10, 12, 14, 19, 21, 22], "g": [2, 5, 7, 12, 14, 16, 19], "pdftotxt": [2, 5, 7, 14, 19], "clean_txt_async": [2, 5, 7, 12, 14, 19], "list": [2, 3, 5, 7, 9, 10, 12, 14, 16, 19], "one": [2, 3, 5, 7, 9, 12, 14, 19], "entri": [2, 3, 5, 7, 9, 12, 14, 19], "callabl": [2, 3, 5, 7, 12, 14, 16, 19], "error": [2, 3, 5, 7, 12, 14, 19], "If": [2, 3, 5, 7, 9, 10, 12, 14, 19], "true": [2, 3, 5, 7, 12, 14, 19], "ignor": [2, 3, 5, 7, 12, 14, 19], "tri": [2, 3, 5, 7, 12, 14, 19], "again": [2, 3, 5, 7, 12, 14, 19], "output": [2, 3, 5, 7, 12, 14, 19], "empti": [2, 3, 5, 7, 9, 10, 12, 14, 19], "float": [2, 3, 5, 7, 12, 14, 19], "rate": [2, 3, 5, 7, 12, 14, 19], "limit": [2, 3, 5, 7, 12, 14, 19], "token": [2, 3, 5, 7, 12, 14, 19], "minut": [2, 3, 5, 7, 12, 14, 19], "90k": [2, 3, 5, 7, 12, 14, 19], "4": [2, 3, 5, 7, 12, 14, 19, 21], "2023": [2, 3, 5, 7, 12, 14, 19], "we": [2, 3, 5, 7, 12, 14, 19], "re": [2, 3, 5, 7, 12, 14, 19], "larg": [2, 3, 5, 7, 12, 14, 19, 21], "factor": [2, 3, 5, 7, 12, 14, 19], "safeti": [2, 3, 5, 7, 12, 14, 19], "1": [2, 3, 5, 7, 9, 12, 14, 19, 21, 22], "2": [2, 3, 5, 7, 10, 12, 14, 19, 21, 22], "becaus": [2, 3, 5, 7, 12, 14, 19], "can": [2, 3, 5, 7, 9, 12, 14, 16, 19], "onli": [2, 3, 5, 7, 12, 14, 19], "count": [2, 3, 5, 7, 9, 12, 14, 19], "side": [2, 3, 5, 7, 12, 14, 19], "assum": [2, 3, 5, 7, 12, 14, 19], "about": [2, 3, 5, 7, 12, 14, 19], "same": [2, 3, 5, 7, 12, 14, 19], "where": [2, 5, 7, 12, 14, 16, 19], "each": [2, 5, 7, 9, 12, 14, 19], "correspond": [2, 5, 7, 10, 12, 14, 19], "queri": [2, 5, 7, 12, 14, 
19], "have": [2, 5, 7, 12, 14, 16, 19], "continu": [2, 5, 7, 12, 14], "includ": [2, 5, 7, 12, 14, 19], "context": [2, 5, 7, 12, 14], "previou": [2, 5, 7, 12, 14], "store": [2, 3, 5, 7, 12, 14], "ask": [2, 5, 7, 12, 14, 19], "chatgpt": [2, 5, 7, 12, 14, 19, 21], "measur": [2, 5, 7, 12, 14, 19], "entropi": [2, 5, 7, 12, 14, 19], "more": [2, 5, 7, 9, 12, 14, 19], "reliabl": [2, 5, 7, 12, 14, 19], "nearli": [2, 5, 7, 12, 14, 19], "determinist": [2, 5, 7, 12, 14, 19], "give": [2, 5, 7, 12, 14, 19], "creativ": [2, 5, 7, 12, 14, 19], "freedom": [2, 5, 7, 12, 14, 19], "mai": [2, 5, 7, 12, 14, 19], "factual": [2, 5, 7, 12, 14, 19], "result": [2, 5, 7, 12, 14, 19], "generic_queri": [2, 5, 7, 12, 14, 19], "gener": [2, 5, 7, 12, 14, 19], "singl": [2, 5, 7, 10, 12, 14, 16, 19], "without": [2, 5, 7, 12, 14, 16, 19], "take": [2, 5, 7, 12, 14, 19], "self": [2, 5, 7, 9, 12, 14, 19], "generic_async_queri": [2, 5, 7, 12, 14, 19], "run": [2, 3, 5, 7, 12, 14, 16, 19, 21, 22], "number": [2, 3, 5, 7, 9, 10, 12, 14, 17, 19], "asynchron": [2, 3, 5, 7, 12, 14, 19], "summari": [2, 5, 7, 12, 19], "run_async": [2, 5, 7, 12, 14, 19], "length": [2, 5, 7, 12, 14, 19], "classmethod": [2, 5, 7, 9, 12, 14, 19], "get_embed": [2, 5, 7, 12, 14, 19], "1d": [2, 5, 7, 12, 14, 19], "arrai": [2, 5, 7, 12, 14, 19], "emb": [2, 5, 12, 14, 19], "repres": [2, 5, 7, 12, 14, 19, 21], "numer": [2, 5, 7, 9, 12, 14, 19], "count_token": [2, 5, 7, 12, 14, 19], "n": [2, 5, 7, 12, 14, 19, 21, 22], "int": [2, 3, 5, 7, 9, 12, 14, 19], "max_retri": 3, "10": 3, "object": [3, 9, 10, 16], "manag": 3, "queue": 3, "submiss": 3, "time": [3, 19], "retri": 3, "befor": [3, 9, 12], "rais": [3, 9, 10], "waiting_on": 3, "job": 3, "being": [3, 16, 19], "wait": 3, "submit_job": 3, "submit": 3, "subset": 3, "hold": 3, "api_job": 3, "break": [3, 4, 5], "when": 3, "exceed": 3, "collect_job": 3, "collect": 3, "all": [3, 7, 9, 10, 12, 14, 19], "order": [3, 9, 10], "util": [4, 8, 12], "up": [4, 5, 7, 12, 14, 21], "overlap": [4, 5], "tag": 5, 
"tokens_per_chunk": 5, "500": [5, 14], "split_on": [5, 12], "base": [5, 7, 9, 10, 12, 14, 16, 19], "apibas": [5, 7, 12, 14, 16, 19], "veri": 5, "paragraph": [5, 7, 14], "exce": 5, "per": [5, 9], "split": [5, 12], "still": 5, "pad": 5, "bodi": [5, 14], "work": [5, 12, 14, 21, 22], "well": [5, 12, 14], "line": [5, 12, 14], "between": [5, 10, 12, 14, 16], "refer": [5, 19], "begin": 5, "nomin": 5, "sub": 5, "demark": 5, "clean_paragraph": 5, "doubl": [5, 12], "sure": [5, 7, 21, 22], "detect": 5, "is_good_paragraph": 5, "basic": [5, 12, 21, 22], "test": 5, "paragraph_token": 5, "chunk_token": 5, "merge_chunk": 5, "chunks_input": 5, "merg": 5, "until": 5, "thei": 5, "reach": 5, "integ": [5, 9, 12], "nest": 5, "indic": [5, 12, 19], "add_overlap": 5, "add": 5, "chunk_text": 5, "perform": [5, 14, 19, 21], "oper": [5, 21], "chunk_kwarg": [7, 14], "chunk": [7, 12, 14, 19, 21], "creat": [7, 9, 10, 21, 22], "piec": 7, "filepath": [7, 10, 12], "txt": 7, "file": [7, 9, 12], "contain": [7, 19], "kwarg": [7, 14], "chunker": [7, 14], "clean_tabl": 7, "tabl": [7, 19], "itself": [7, 9], "pipe": 7, "hyphen": 7, "which": [7, 9, 10, 12, 16], "how": [7, 16], "175000": 7, "serial": [7, 12], "350k": 7, "retriev": [8, 9], "n_page": 9, "handl": [9, 10], "multipl": [9, 12, 19], "record": [9, 10, 16], "request": [9, 19], "see": 9, "detail": 9, "www": 9, "gov": 9, "doc": 9, "page": [9, 12], "ha": [9, 10, 12], "20": 9, "ensur": 9, "doesnt": 9, "hang": 9, "million": 9, "base_url": 9, "append": 9, "search": [9, 16, 19], "download": [9, 10], "out_dir": 9, "directori": 9, "given": [9, 10, 12], "name": [9, 14, 16, 19], "id": [9, 10], "doe": [9, 10, 12, 16, 21], "alreadi": 9, "exist": 9, "meta": 9, "datafram": [9, 19], "pd": [9, 19], "__add__": 9, "valu": [9, 10, 19], "__mul__": 9, "end": [9, 12], "remov": [9, 10, 12], "item": [9, 10], "copi": [9, 10], "shallow": [9, 10], "occurr": 9, "extend": 9, "iter": [9, 10], "element": 9, "from_osti_id": 9, "oid": 9, "7": [9, 10, 19], "digit": [9, 10], "index": 
[9, 12], "start": [9, 12, 16], "stop": 9, "9223372036854775807": 9, "first": [9, 10], "valueerror": 9, "present": [9, 10], "insert": [9, 10], "pop": [9, 10], "last": [9, 10], "indexerror": 9, "rang": 9, "revers": 9, "IN": 9, "place": 9, "sort": [9, 19], "fals": [9, 12, 19], "ascend": 9, "The": [9, 14, 19, 21], "modifi": 9, "stabl": 9, "two": 9, "equal": [9, 19], "maintain": [9, 12], "function": 9, "appli": [9, 21], "onc": 9, "them": 9, "descend": 9, "accord": 9, "flag": [9, 12, 14, 19], "set": [9, 10, 12], "dictionari": 10, "form": [10, 21], "strip_nested_bracket": 10, "bracket": 10, "parenthes": 10, "titl": 10, "year": 10, "public": [10, 21], "date": 10, "doi": 10, "osti_id": 10, "fp": [10, 12], "d": 10, "fromkei": 10, "new": [10, 19], "els": [10, 16], "provid": [10, 14, 19, 21], "view": [10, 21], "": [10, 16, 21], "k": 10, "v": [10, 12, 19], "specifi": 10, "found": [10, 19], "otherwis": 10, "keyerror": 10, "popitem": 10, "pair": 10, "tupl": [10, 12, 19], "lifo": 10, "setdefault": 10, "updat": 10, "lack": 10, "In": 10, "case": 10, "follow": 10, "parser": 11, "page_rang": 12, "extract": [12, 21], "python": 12, "poorli": 12, "model_instruct": [12, 14, 19], "nthe": 12, "abov": [12, 14], "wa": [12, 21], "nice": 12, "pleas": [12, 14], "comment": 12, "ad": [12, 19], "inform": [12, 19], "instruct": 12, "brace": 12, "load_pdf": 12, "load": 12, "make_gpt_messag": 12, "pdf_raw_text": 12, "clean_txt": 12, "clean_pag": 12, "is_double_col": 12, "separ": [12, 14], "vertic": 12, "column": [12, 19], "heurist": 12, "space": 12, "bool": [12, 14, 19], "than": 12, "clean_poppl": 12, "layout": 12, "poppler": 12, "requir": [12, 19], "pdftotext": 12, "softwar": [12, 21], "freedesktop": 12, "org": 12, "origin": [12, 19], "physic": 12, "collaps": 12, "better": 12, "downstream": 12, "join": 12, "validate_clean": 12, "some": [12, 19], "check": [12, 19], "combine_pag": 12, "combin": [12, 14], "multi": 12, "clean_head": 12, "char_thresh": 12, "6": 12, "page_thresh": 12, "8": 12, "ihead": 12, 
"footer": 12, "duplic": 12, "across": 12, "fraction": 12, "charact": 12, "similar": 12, "consid": 12, "share": 12, "char": 12, "after": 12, "go": 12, "summar": [13, 14, 21], "distil": [13, 14], "n_word": 14, "interfac": [14, 19, 21], "recurs": [14, 21], "pre": 14, "global": [14, 19], "var": [14, 19], "desir": 14, "never": 14, "perfect": 14, "help": [14, 19], "guid": 14, "approxim": 14, "400": 14, "600": 14, "word": 14, "seem": 14, "quit": 14, "scientist": 14, "prior": [14, 21, 22], "somewhat": [14, 19], "redund": [14, 19], "quot": 14, "text_chunk": 14, "prefix": [14, 19], "engin": [14, 19], "prompt": [14, 16, 19], "arg": 14, "runtim": 14, "argument": [14, 16], "text_summari": 14, "comprehens": 14, "narr": 14, "sever": 14, "disjoint": 14, "cohes": 14, "fancy_combin": 14, "decis": [15, 16, 21], "graph": 16, "travers": 16, "direct": 16, "node": 16, "edg": 16, "transit": 16, "condit": 16, "met": 16, "exampl": [16, 19, 21], "here": [16, 19], "simpl": 16, "setup": 16, "import": 16, "log": 16, "networkx": 16, "nx": 16, "rex": 16, "init_logg": 16, "digraph": 16, "hello": 16, "grant": 16, "35": 16, "add_nod": 16, "init": [16, 19], "sai": 16, "add_edg": 16, "next": 16, "lambda": 16, "x": 16, "print": [16, 19], "acycl": 16, "logic": 16, "must": [16, 19], "instanc": 16, "should": 16, "fill": 16, "execut": 16, "act": 16, "statement": 16, "other": [16, 21], "satisfi": 16, "A": [16, 19], "call_nod": 16, "node0": 16, "successor": 16, "valid": [16, 19], "leaf": 16, "final": 16, "corpu": [19, 21], "token_budget": 19, "3500": 19, "ref_col": 19, "budget": 19, "4096": 19, "want": 19, "subtract": 19, "account": 19, "label": 19, "through": 19, "articl": [19, 21], "below": [19, 21, 22], "subsequ": 19, "cannot": 19, "write": 19, "could": 19, "find": 19, "preflight_corpu": 19, "preflight": 19, "df": 19, "cosine_dist": 19, "query_embed": 19, "comput": 19, "cosin": 19, "distanc": 19, "np": 19, "ndarrai": 19, "score": 19, "smaller": 19, "closer": 19, "rank_str": 19, "top_n": 19, "100": 19, 
"related": 19, "most": 19, "relat": 19, "least": 19, "top": 19, "idx": 19, "rank": 19, "engineer_queri": 19, "new_info_threshold": 19, "convo": 19, "overrid": 19, "much": 19, "prevent": 19, "semant": 19, "reset": 19, "make_ref_list": 19, "used_index": 19, "ref_list": 19, "debug": 19, "stream": 19, "print_refer": 19, "return_chat_obj": 19, "relev": 19, "send": 19, "extra": 19, "diagnost": 19, "fashion": 19, "chatcomplet": 19, "also": 19, "For": 21, "might": 21, "interest": 21, "convert": 21, "databas": 21, "vector": 21, "build": 21, "autom": 21, "workflow": 21, "tree": 21, "chatbot": 21, "your": 21, "own": 21, "usag": [21, 22], "pip": [21, 22], "nrel": [21, 22], "develop": [21, 22], "home": [21, 22], "dir": [21, 22], "git": [21, 22], "clone": [21, 22], "github": [21, 22], "environ": [21, 22], "packag": [21, 22], "conda": [21, 22], "env": [21, 22], "activ": [21, 22], "cd": [21, 22], "repo": [21, 22], "branch": [21, 22], "correct": [21, 22], "main": [21, 22], "its": [21, 22], "depend": [21, 22], "dev": [21, 22], "code": [21, 22], "nation": 21, "renew": 21, "laboratori": 21, "allianc": 21, "sustain": 21, "llc": 21, "u": 21, "depart": 21, "under": 21, "contract": 21, "No": 21, "de": 21, "ac36": 21, "08go28308": 21, "fund": 21, "wind": 21, "technologi": 21, "offic": 21, "weto": 21, "solar": 21, "seto": 21, "intern": 21, "express": 21, "necessarili": 21, "govern": 21, "retain": 21, "publish": 21, "accept": 21, "nonexclus": 21, "paid": 21, "irrevoc": 21, "worldwid": 21, "licens": 21, "reproduc": 21, "allow": 21, "so": 21, "purpos": 21, "elm": 22}, "objects": {"": [[0, 0, 0, "-", "elm"]], "elm": [[1, 0, 0, "-", "base"], [4, 0, 0, "-", "chunk"], [6, 0, 0, "-", "embed"], [8, 0, 0, "-", "osti"], [11, 0, 0, "-", "pdf"], [13, 0, 0, "-", "summary"], [15, 0, 0, "-", "tree"], [17, 0, 0, "-", "version"], [18, 0, 0, "-", "wizard"]], "elm.base": [[2, 1, 1, "", "ApiBase"], [3, 1, 1, "", "ApiQueue"]], "elm.base.ApiBase": [[2, 2, 1, "", "DEFAULT_MODEL"], [2, 2, 1, "", "EMBEDDING_MODEL"], 
[2, 2, 1, "", "EMBEDDING_URL"], [2, 2, 1, "", "HEADERS"], [2, 2, 1, "", "MODEL_ROLE"], [2, 2, 1, "", "URL"], [2, 3, 1, "", "all_messages_txt"], [2, 4, 1, "", "call_api"], [2, 4, 1, "", "call_api_async"], [2, 4, 1, "", "chat"], [2, 4, 1, "", "clear"], [2, 4, 1, "", "count_tokens"], [2, 4, 1, "", "generic_async_query"], [2, 4, 1, "", "generic_query"], [2, 4, 1, "", "get_embedding"]], "elm.base.ApiQueue": [[3, 4, 1, "", "collect_jobs"], [3, 4, 1, "", "run"], [3, 4, 1, "", "submit_jobs"], [3, 3, 1, "", "waiting_on"]], "elm.chunk": [[5, 1, 1, "", "Chunker"]], "elm.chunk.Chunker": [[5, 2, 1, "", "DEFAULT_MODEL"], [5, 2, 1, "", "EMBEDDING_MODEL"], [5, 2, 1, "", "EMBEDDING_URL"], [5, 2, 1, "", "HEADERS"], [5, 2, 1, "", "MODEL_ROLE"], [5, 2, 1, "", "URL"], [5, 4, 1, "", "add_overlap"], [5, 3, 1, "", "all_messages_txt"], [5, 4, 1, "", "call_api"], [5, 4, 1, "", "call_api_async"], [5, 4, 1, "", "chat"], [5, 4, 1, "", "chunk_text"], [5, 3, 1, "", "chunk_tokens"], [5, 3, 1, "", "chunks"], [5, 4, 1, "", "clean_paragraphs"], [5, 4, 1, "", "clear"], [5, 4, 1, "", "count_tokens"], [5, 4, 1, "", "generic_async_query"], [5, 4, 1, "", "generic_query"], [5, 4, 1, "", "get_embedding"], [5, 4, 1, "", "is_good_paragraph"], [5, 4, 1, "", "merge_chunks"], [5, 3, 1, "", "paragraph_tokens"], [5, 3, 1, "", "paragraphs"]], "elm.embed": [[7, 1, 1, "", "ChunkAndEmbed"]], "elm.embed.ChunkAndEmbed": [[7, 2, 1, "", "DEFAULT_MODEL"], [7, 2, 1, "", "EMBEDDING_MODEL"], [7, 2, 1, "", "EMBEDDING_URL"], [7, 2, 1, "", "HEADERS"], [7, 2, 1, "", "MODEL_ROLE"], [7, 2, 1, "", "URL"], [7, 3, 1, "", "all_messages_txt"], [7, 4, 1, "", "call_api"], [7, 4, 1, "", "call_api_async"], [7, 4, 1, "", "chat"], [7, 4, 1, "", "clean_tables"], [7, 4, 1, "", "clear"], [7, 4, 1, "", "count_tokens"], [7, 4, 1, "", "generic_async_query"], [7, 4, 1, "", "generic_query"], [7, 4, 1, "", "get_embedding"], [7, 4, 1, "", "run"], [7, 4, 1, "", "run_async"]], "elm.osti": [[9, 1, 1, "", "OstiList"], [10, 1, 1, "", "OstiRecord"]], 
"elm.osti.OstiList": [[9, 2, 1, "", "BASE_URL"], [9, 4, 1, "", "__add__"], [9, 4, 1, "", "__mul__"], [9, 4, 1, "", "append"], [9, 4, 1, "", "clear"], [9, 4, 1, "", "copy"], [9, 4, 1, "", "count"], [9, 4, 1, "", "download"], [9, 4, 1, "", "extend"], [9, 4, 1, "", "from_osti_ids"], [9, 4, 1, "", "index"], [9, 4, 1, "", "insert"], [9, 3, 1, "", "meta"], [9, 4, 1, "", "pop"], [9, 4, 1, "", "remove"], [9, 4, 1, "", "reverse"], [9, 4, 1, "", "sort"]], "elm.osti.OstiRecord": [[10, 3, 1, "", "authors"], [10, 4, 1, "", "clear"], [10, 4, 1, "", "copy"], [10, 3, 1, "", "date"], [10, 3, 1, "", "doi"], [10, 4, 1, "", "download"], [10, 4, 1, "", "fromkeys"], [10, 4, 1, "", "get"], [10, 4, 1, "", "items"], [10, 4, 1, "", "keys"], [10, 3, 1, "", "osti_id"], [10, 4, 1, "", "pop"], [10, 4, 1, "", "popitem"], [10, 4, 1, "", "setdefault"], [10, 4, 1, "", "strip_nested_brackets"], [10, 3, 1, "", "title"], [10, 4, 1, "", "update"], [10, 3, 1, "", "url"], [10, 4, 1, "", "values"], [10, 3, 1, "", "year"]], "elm.pdf": [[12, 1, 1, "", "PDFtoTXT"]], "elm.pdf.PDFtoTXT": [[12, 2, 1, "", "DEFAULT_MODEL"], [12, 2, 1, "", "EMBEDDING_MODEL"], [12, 2, 1, "", "EMBEDDING_URL"], [12, 2, 1, "", "HEADERS"], [12, 2, 1, "", "MODEL_INSTRUCTION"], [12, 2, 1, "", "MODEL_ROLE"], [12, 2, 1, "", "URL"], [12, 3, 1, "", "all_messages_txt"], [12, 4, 1, "", "call_api"], [12, 4, 1, "", "call_api_async"], [12, 4, 1, "", "chat"], [12, 4, 1, "", "clean_headers"], [12, 4, 1, "", "clean_poppler"], [12, 4, 1, "", "clean_txt"], [12, 4, 1, "", "clean_txt_async"], [12, 4, 1, "", "clear"], [12, 4, 1, "", "combine_pages"], [12, 4, 1, "", "count_tokens"], [12, 4, 1, "", "generic_async_query"], [12, 4, 1, "", "generic_query"], [12, 4, 1, "", "get_embedding"], [12, 4, 1, "", "is_double_col"], [12, 4, 1, "", "load_pdf"], [12, 4, 1, "", "make_gpt_messages"], [12, 4, 1, "", "validate_clean"]], "elm.summary": [[14, 1, 1, "", "Summary"]], "elm.summary.Summary": [[14, 2, 1, "", "DEFAULT_MODEL"], [14, 2, 1, "", "EMBEDDING_MODEL"], [14, 
2, 1, "", "EMBEDDING_URL"], [14, 2, 1, "", "HEADERS"], [14, 2, 1, "", "MODEL_INSTRUCTION"], [14, 2, 1, "", "MODEL_ROLE"], [14, 2, 1, "", "URL"], [14, 3, 1, "", "all_messages_txt"], [14, 4, 1, "", "call_api"], [14, 4, 1, "", "call_api_async"], [14, 4, 1, "", "chat"], [14, 4, 1, "", "clear"], [14, 4, 1, "", "combine"], [14, 4, 1, "", "count_tokens"], [14, 4, 1, "", "generic_async_query"], [14, 4, 1, "", "generic_query"], [14, 4, 1, "", "get_embedding"], [14, 4, 1, "", "run"], [14, 4, 1, "", "run_async"]], "elm.tree": [[16, 1, 1, "", "DecisionTree"]], "elm.tree.DecisionTree": [[16, 3, 1, "", "all_messages_txt"], [16, 3, 1, "", "api"], [16, 4, 1, "", "call_node"], [16, 3, 1, "", "graph"], [16, 3, 1, "", "history"], [16, 3, 1, "", "messages"], [16, 4, 1, "", "run"]], "elm.wizard": [[19, 1, 1, "", "EnergyWizard"]], "elm.wizard.EnergyWizard": [[19, 2, 1, "", "DEFAULT_MODEL"], [19, 2, 1, "", "EMBEDDING_MODEL"], [19, 2, 1, "", "EMBEDDING_URL"], [19, 2, 1, "", "HEADERS"], [19, 2, 1, "", "MODEL_INSTRUCTION"], [19, 2, 1, "", "MODEL_ROLE"], [19, 2, 1, "", "URL"], [19, 3, 1, "", "all_messages_txt"], [19, 4, 1, "", "call_api"], [19, 4, 1, "", "call_api_async"], [19, 4, 1, "", "chat"], [19, 4, 1, "", "clear"], [19, 4, 1, "", "cosine_dist"], [19, 4, 1, "", "count_tokens"], [19, 4, 1, "", "engineer_query"], [19, 4, 1, "", "generic_async_query"], [19, 4, 1, "", "generic_query"], [19, 4, 1, "", "get_embedding"], [19, 4, 1, "", "make_ref_list"], [19, 4, 1, "", "preflight_corpus"], [19, 4, 1, "", "rank_strings"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:property", "4": "py:method"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "property", "Python property"], "4": ["py", "method", "Python method"]}, "titleterms": {"elm": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21], "base": [1, 2, 3], "apibas": 2, "apiqueu": 3, 
"chunk": [4, 5], "chunker": 5, "emb": [6, 7], "chunkandemb": 7, "osti": [8, 9, 10], "ostilist": 9, "ostirecord": 10, "pdf": [11, 12], "pdftotxt": 12, "summari": [13, 14], "tree": [15, 16], "decisiontre": 16, "version": 17, "wizard": [18, 19], "energywizard": 19, "energi": 21, "languag": 21, "model": 21, "instal": [21, 22], "acknowledg": 21}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.viewcode": 1, "sphinx": 60}, "alltitles": {"elm": [[0, "module-elm"]], "elm.base": [[1, "module-elm.base"]], "elm.base.ApiBase": [[2, "elm-base-apibase"]], "elm.base.ApiQueue": [[3, "elm-base-apiqueue"]], "elm.chunk": [[4, "module-elm.chunk"]], "elm.chunk.Chunker": [[5, "elm-chunk-chunker"]], "elm.embed": [[6, "module-elm.embed"]], "elm.embed.ChunkAndEmbed": [[7, "elm-embed-chunkandembed"]], "elm.osti": [[8, "module-elm.osti"]], "elm.osti.OstiList": [[9, "elm-osti-ostilist"]], "elm.osti.OstiRecord": [[10, "elm-osti-ostirecord"]], "elm.pdf": [[11, "module-elm.pdf"]], "elm.pdf.PDFtoTXT": [[12, "elm-pdf-pdftotxt"]], "elm.summary": [[13, "module-elm.summary"]], "elm.summary.Summary": [[14, "elm-summary-summary"]], "elm.tree": [[15, "module-elm.tree"]], "elm.tree.DecisionTree": [[16, "elm-tree-decisiontree"]], "elm.version": [[17, "module-elm.version"]], "elm.wizard": [[18, "module-elm.wizard"]], "elm.wizard.EnergyWizard": [[19, "elm-wizard-energywizard"]], "Energy Language Model (ELM)": [[21, "energy-language-model-elm"]], "Installing ELM": [[21, "installing-elm"]], "Acknowledgments": [[21, "acknowledgments"]], "Installation": [[22, "installation"]]}, "indexentries": {"elm": [[0, "module-elm"]], "module": [[0, "module-elm"], [1, "module-elm.base"], [4, "module-elm.chunk"], [6, "module-elm.embed"], [8, 
"module-elm.osti"], [11, "module-elm.pdf"], [13, "module-elm.summary"], [15, "module-elm.tree"], [17, "module-elm.version"], [18, "module-elm.wizard"]], "elm.base": [[1, "module-elm.base"]], "apibase (class in elm.base)": [[2, "elm.base.ApiBase"]], "default_model (apibase attribute)": [[2, "elm.base.ApiBase.DEFAULT_MODEL"]], "embedding_model (apibase attribute)": [[2, "elm.base.ApiBase.EMBEDDING_MODEL"]], "embedding_url (apibase attribute)": [[2, "elm.base.ApiBase.EMBEDDING_URL"]], "headers (apibase attribute)": [[2, "elm.base.ApiBase.HEADERS"]], "model_role (apibase attribute)": [[2, "elm.base.ApiBase.MODEL_ROLE"]], "url (apibase attribute)": [[2, "elm.base.ApiBase.URL"]], "all_messages_txt (apibase property)": [[2, "elm.base.ApiBase.all_messages_txt"]], "call_api() (apibase static method)": [[2, "elm.base.ApiBase.call_api"]], "call_api_async() (apibase method)": [[2, "elm.base.ApiBase.call_api_async"]], "chat() (apibase method)": [[2, "elm.base.ApiBase.chat"]], "clear() (apibase method)": [[2, "elm.base.ApiBase.clear"]], "count_tokens() (apibase static method)": [[2, "elm.base.ApiBase.count_tokens"]], "generic_async_query() (apibase method)": [[2, "elm.base.ApiBase.generic_async_query"]], "generic_query() (apibase method)": [[2, "elm.base.ApiBase.generic_query"]], "get_embedding() (apibase class method)": [[2, "elm.base.ApiBase.get_embedding"]], "apiqueue (class in elm.base)": [[3, "elm.base.ApiQueue"]], "collect_jobs() (apiqueue method)": [[3, "elm.base.ApiQueue.collect_jobs"]], "run() (apiqueue method)": [[3, "elm.base.ApiQueue.run"]], "submit_jobs() (apiqueue method)": [[3, "elm.base.ApiQueue.submit_jobs"]], "waiting_on (apiqueue property)": [[3, "elm.base.ApiQueue.waiting_on"]], "elm.chunk": [[4, "module-elm.chunk"]], "chunker (class in elm.chunk)": [[5, "elm.chunk.Chunker"]], "default_model (chunker attribute)": [[5, "elm.chunk.Chunker.DEFAULT_MODEL"]], "embedding_model (chunker attribute)": [[5, "elm.chunk.Chunker.EMBEDDING_MODEL"]], "embedding_url (chunker 
attribute)": [[5, "elm.chunk.Chunker.EMBEDDING_URL"]], "headers (chunker attribute)": [[5, "elm.chunk.Chunker.HEADERS"]], "model_role (chunker attribute)": [[5, "elm.chunk.Chunker.MODEL_ROLE"]], "url (chunker attribute)": [[5, "elm.chunk.Chunker.URL"]], "add_overlap() (chunker method)": [[5, "elm.chunk.Chunker.add_overlap"]], "all_messages_txt (chunker property)": [[5, "elm.chunk.Chunker.all_messages_txt"]], "call_api() (chunker static method)": [[5, "elm.chunk.Chunker.call_api"]], "call_api_async() (chunker method)": [[5, "elm.chunk.Chunker.call_api_async"]], "chat() (chunker method)": [[5, "elm.chunk.Chunker.chat"]], "chunk_text() (chunker method)": [[5, "elm.chunk.Chunker.chunk_text"]], "chunk_tokens (chunker property)": [[5, "elm.chunk.Chunker.chunk_tokens"]], "chunks (chunker property)": [[5, "elm.chunk.Chunker.chunks"]], "clean_paragraphs() (chunker static method)": [[5, "elm.chunk.Chunker.clean_paragraphs"]], "clear() (chunker method)": [[5, "elm.chunk.Chunker.clear"]], "count_tokens() (chunker static method)": [[5, "elm.chunk.Chunker.count_tokens"]], "generic_async_query() (chunker method)": [[5, "elm.chunk.Chunker.generic_async_query"]], "generic_query() (chunker method)": [[5, "elm.chunk.Chunker.generic_query"]], "get_embedding() (chunker class method)": [[5, "elm.chunk.Chunker.get_embedding"]], "is_good_paragraph() (chunker static method)": [[5, "elm.chunk.Chunker.is_good_paragraph"]], "merge_chunks() (chunker method)": [[5, "elm.chunk.Chunker.merge_chunks"]], "paragraph_tokens (chunker property)": [[5, "elm.chunk.Chunker.paragraph_tokens"]], "paragraphs (chunker property)": [[5, "elm.chunk.Chunker.paragraphs"]], "elm.embed": [[6, "module-elm.embed"]], "chunkandembed (class in elm.embed)": [[7, "elm.embed.ChunkAndEmbed"]], "default_model (chunkandembed attribute)": [[7, "elm.embed.ChunkAndEmbed.DEFAULT_MODEL"]], "embedding_model (chunkandembed attribute)": [[7, "elm.embed.ChunkAndEmbed.EMBEDDING_MODEL"]], "embedding_url (chunkandembed attribute)": [[7, 
"elm.embed.ChunkAndEmbed.EMBEDDING_URL"]], "headers (chunkandembed attribute)": [[7, "elm.embed.ChunkAndEmbed.HEADERS"]], "model_role (chunkandembed attribute)": [[7, "elm.embed.ChunkAndEmbed.MODEL_ROLE"]], "url (chunkandembed attribute)": [[7, "elm.embed.ChunkAndEmbed.URL"]], "all_messages_txt (chunkandembed property)": [[7, "elm.embed.ChunkAndEmbed.all_messages_txt"]], "call_api() (chunkandembed static method)": [[7, "elm.embed.ChunkAndEmbed.call_api"]], "call_api_async() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.call_api_async"]], "chat() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.chat"]], "clean_tables() (chunkandembed static method)": [[7, "elm.embed.ChunkAndEmbed.clean_tables"]], "clear() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.clear"]], "count_tokens() (chunkandembed static method)": [[7, "elm.embed.ChunkAndEmbed.count_tokens"]], "generic_async_query() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.generic_async_query"]], "generic_query() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.generic_query"]], "get_embedding() (chunkandembed class method)": [[7, "elm.embed.ChunkAndEmbed.get_embedding"]], "run() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.run"]], "run_async() (chunkandembed method)": [[7, "elm.embed.ChunkAndEmbed.run_async"]], "elm.osti": [[8, "module-elm.osti"]], "base_url (ostilist attribute)": [[9, "elm.osti.OstiList.BASE_URL"]], "ostilist (class in elm.osti)": [[9, "elm.osti.OstiList"]], "__add__() (ostilist method)": [[9, "elm.osti.OstiList.__add__"]], "__mul__() (ostilist method)": [[9, "elm.osti.OstiList.__mul__"]], "append() (ostilist method)": [[9, "elm.osti.OstiList.append"]], "clear() (ostilist method)": [[9, "elm.osti.OstiList.clear"]], "copy() (ostilist method)": [[9, "elm.osti.OstiList.copy"]], "count() (ostilist method)": [[9, "elm.osti.OstiList.count"]], "download() (ostilist method)": [[9, "elm.osti.OstiList.download"]], "extend() (ostilist method)": [[9, 
"elm.osti.OstiList.extend"]], "from_osti_ids() (ostilist class method)": [[9, "elm.osti.OstiList.from_osti_ids"]], "index() (ostilist method)": [[9, "elm.osti.OstiList.index"]], "insert() (ostilist method)": [[9, "elm.osti.OstiList.insert"]], "meta (ostilist property)": [[9, "elm.osti.OstiList.meta"]], "pop() (ostilist method)": [[9, "elm.osti.OstiList.pop"]], "remove() (ostilist method)": [[9, "elm.osti.OstiList.remove"]], "reverse() (ostilist method)": [[9, "elm.osti.OstiList.reverse"]], "sort() (ostilist method)": [[9, "elm.osti.OstiList.sort"]], "ostirecord (class in elm.osti)": [[10, "elm.osti.OstiRecord"]], "authors (ostirecord property)": [[10, "elm.osti.OstiRecord.authors"]], "clear() (ostirecord method)": [[10, "elm.osti.OstiRecord.clear"]], "copy() (ostirecord method)": [[10, "elm.osti.OstiRecord.copy"]], "date (ostirecord property)": [[10, "elm.osti.OstiRecord.date"]], "doi (ostirecord property)": [[10, "elm.osti.OstiRecord.doi"]], "download() (ostirecord method)": [[10, "elm.osti.OstiRecord.download"]], "fromkeys() (ostirecord method)": [[10, "elm.osti.OstiRecord.fromkeys"]], "get() (ostirecord method)": [[10, "elm.osti.OstiRecord.get"]], "items() (ostirecord method)": [[10, "elm.osti.OstiRecord.items"]], "keys() (ostirecord method)": [[10, "elm.osti.OstiRecord.keys"]], "osti_id (ostirecord property)": [[10, "elm.osti.OstiRecord.osti_id"]], "pop() (ostirecord method)": [[10, "elm.osti.OstiRecord.pop"]], "popitem() (ostirecord method)": [[10, "elm.osti.OstiRecord.popitem"]], "setdefault() (ostirecord method)": [[10, "elm.osti.OstiRecord.setdefault"]], "strip_nested_brackets() (ostirecord static method)": [[10, "elm.osti.OstiRecord.strip_nested_brackets"]], "title (ostirecord property)": [[10, "elm.osti.OstiRecord.title"]], "update() (ostirecord method)": [[10, "elm.osti.OstiRecord.update"]], "url (ostirecord property)": [[10, "elm.osti.OstiRecord.url"]], "values() (ostirecord method)": [[10, "elm.osti.OstiRecord.values"]], "year (ostirecord property)": 
[[10, "elm.osti.OstiRecord.year"]], "elm.pdf": [[11, "module-elm.pdf"]], "default_model (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.DEFAULT_MODEL"]], "embedding_model (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.EMBEDDING_MODEL"]], "embedding_url (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.EMBEDDING_URL"]], "headers (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.HEADERS"]], "model_instruction (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.MODEL_INSTRUCTION"]], "model_role (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.MODEL_ROLE"]], "pdftotxt (class in elm.pdf)": [[12, "elm.pdf.PDFtoTXT"]], "url (pdftotxt attribute)": [[12, "elm.pdf.PDFtoTXT.URL"]], "all_messages_txt (pdftotxt property)": [[12, "elm.pdf.PDFtoTXT.all_messages_txt"]], "call_api() (pdftotxt static method)": [[12, "elm.pdf.PDFtoTXT.call_api"]], "call_api_async() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.call_api_async"]], "chat() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.chat"]], "clean_headers() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.clean_headers"]], "clean_poppler() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.clean_poppler"]], "clean_txt() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.clean_txt"]], "clean_txt_async() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.clean_txt_async"]], "clear() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.clear"]], "combine_pages() (pdftotxt static method)": [[12, "elm.pdf.PDFtoTXT.combine_pages"]], "count_tokens() (pdftotxt static method)": [[12, "elm.pdf.PDFtoTXT.count_tokens"]], "generic_async_query() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.generic_async_query"]], "generic_query() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.generic_query"]], "get_embedding() (pdftotxt class method)": [[12, "elm.pdf.PDFtoTXT.get_embedding"]], "is_double_col() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.is_double_col"]], "load_pdf() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.load_pdf"]], "make_gpt_messages() (pdftotxt method)": [[12, 
"elm.pdf.PDFtoTXT.make_gpt_messages"]], "validate_clean() (pdftotxt method)": [[12, "elm.pdf.PDFtoTXT.validate_clean"]], "elm.summary": [[13, "module-elm.summary"]], "default_model (summary attribute)": [[14, "elm.summary.Summary.DEFAULT_MODEL"]], "embedding_model (summary attribute)": [[14, "elm.summary.Summary.EMBEDDING_MODEL"]], "embedding_url (summary attribute)": [[14, "elm.summary.Summary.EMBEDDING_URL"]], "headers (summary attribute)": [[14, "elm.summary.Summary.HEADERS"]], "model_instruction (summary attribute)": [[14, "elm.summary.Summary.MODEL_INSTRUCTION"]], "model_role (summary attribute)": [[14, "elm.summary.Summary.MODEL_ROLE"]], "summary (class in elm.summary)": [[14, "elm.summary.Summary"]], "url (summary attribute)": [[14, "elm.summary.Summary.URL"]], "all_messages_txt (summary property)": [[14, "elm.summary.Summary.all_messages_txt"]], "call_api() (summary static method)": [[14, "elm.summary.Summary.call_api"]], "call_api_async() (summary method)": [[14, "elm.summary.Summary.call_api_async"]], "chat() (summary method)": [[14, "elm.summary.Summary.chat"]], "clear() (summary method)": [[14, "elm.summary.Summary.clear"]], "combine() (summary method)": [[14, "elm.summary.Summary.combine"]], "count_tokens() (summary static method)": [[14, "elm.summary.Summary.count_tokens"]], "generic_async_query() (summary method)": [[14, "elm.summary.Summary.generic_async_query"]], "generic_query() (summary method)": [[14, "elm.summary.Summary.generic_query"]], "get_embedding() (summary class method)": [[14, "elm.summary.Summary.get_embedding"]], "run() (summary method)": [[14, "elm.summary.Summary.run"]], "run_async() (summary method)": [[14, "elm.summary.Summary.run_async"]], "elm.tree": [[15, "module-elm.tree"]], "decisiontree (class in elm.tree)": [[16, "elm.tree.DecisionTree"]], "all_messages_txt (decisiontree property)": [[16, "elm.tree.DecisionTree.all_messages_txt"]], "api (decisiontree property)": [[16, "elm.tree.DecisionTree.api"]], "call_node() 
(decisiontree method)": [[16, "elm.tree.DecisionTree.call_node"]], "graph (decisiontree property)": [[16, "elm.tree.DecisionTree.graph"]], "history (decisiontree property)": [[16, "elm.tree.DecisionTree.history"]], "messages (decisiontree property)": [[16, "elm.tree.DecisionTree.messages"]], "run() (decisiontree method)": [[16, "elm.tree.DecisionTree.run"]], "elm.version": [[17, "module-elm.version"]], "elm.wizard": [[18, "module-elm.wizard"]], "default_model (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.DEFAULT_MODEL"]], "embedding_model (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.EMBEDDING_MODEL"]], "embedding_url (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.EMBEDDING_URL"]], "energywizard (class in elm.wizard)": [[19, "elm.wizard.EnergyWizard"]], "headers (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.HEADERS"]], "model_instruction (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.MODEL_INSTRUCTION"]], "model_role (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.MODEL_ROLE"]], "url (energywizard attribute)": [[19, "elm.wizard.EnergyWizard.URL"]], "all_messages_txt (energywizard property)": [[19, "elm.wizard.EnergyWizard.all_messages_txt"]], "call_api() (energywizard static method)": [[19, "elm.wizard.EnergyWizard.call_api"]], "call_api_async() (energywizard method)": [[19, "elm.wizard.EnergyWizard.call_api_async"]], "chat() (energywizard method)": [[19, "elm.wizard.EnergyWizard.chat"]], "clear() (energywizard method)": [[19, "elm.wizard.EnergyWizard.clear"]], "cosine_dist() (energywizard method)": [[19, "elm.wizard.EnergyWizard.cosine_dist"]], "count_tokens() (energywizard static method)": [[19, "elm.wizard.EnergyWizard.count_tokens"]], "engineer_query() (energywizard method)": [[19, "elm.wizard.EnergyWizard.engineer_query"]], "generic_async_query() (energywizard method)": [[19, "elm.wizard.EnergyWizard.generic_async_query"]], "generic_query() (energywizard method)": [[19, 
"elm.wizard.EnergyWizard.generic_query"]], "get_embedding() (energywizard class method)": [[19, "elm.wizard.EnergyWizard.get_embedding"]], "make_ref_list() (energywizard method)": [[19, "elm.wizard.EnergyWizard.make_ref_list"]], "preflight_corpus() (energywizard static method)": [[19, "elm.wizard.EnergyWizard.preflight_corpus"]], "rank_strings() (energywizard method)": [[19, "elm.wizard.EnergyWizard.rank_strings"]]}}) \ No newline at end of file