# NOTE(review): removed dataset-extraction header residue here
# (a "code | package | path | filename" table fragment, not Python source).
from eth_utils import (
add_0x_prefix,
apply_to_return_value,
from_wei,
is_address,
is_checksum_address,
keccak as eth_utils_keccak,
remove_0x_prefix,
to_checksum_address,
to_wei,
)
from hexbytes import (
HexBytes,
)
from ens import ENS
from web3._utils.abi import (
map_abi_data,
)
from web3._utils.decorators import (
combomethod,
)
from web3._utils.empty import (
empty,
)
from web3._utils.encoding import (
hex_encode_abi_type,
to_bytes,
to_hex,
to_int,
to_text,
to_json,
)
from web3._utils.normalizers import (
abi_ens_resolver,
)
from web3.admin import (
Admin,
)
from web3.eth import (
Eth,
)
from web3.iban import (
Iban,
)
from web3.manager import (
RequestManager as DefaultRequestManager,
)
from web3.miner import (
Miner,
)
from web3.net import (
Net,
)
from web3.parity import (
Parity,
)
from web3.personal import (
Personal,
)
from web3.providers.eth_tester import (
EthereumTesterProvider,
)
from web3.providers.ipc import (
IPCProvider,
)
from web3.providers.rpc import (
HTTPProvider,
)
from web3.providers.websocket import (
WebsocketProvider,
)
from web3.testing import (
Testing,
)
from web3.txpool import (
TxPool,
)
from web3.version import (
Version,
)
def get_default_modules():
    """Return the default mapping of attachment name to web3 module class.

    ``Web3.__init__`` attaches each of these classes to the instance under
    the corresponding attribute name (e.g. ``w3.eth``).
    """
    return dict(
        eth=Eth,
        net=Net,
        personal=Personal,
        version=Version,
        txpool=TxPool,
        miner=Miner,
        admin=Admin,
        parity=Parity,
        testing=Testing,
    )
class Web3:
    """
    Main entry point of the library: bundles the providers, the request
    manager, the attached modules (eth, net, ...) and assorted static
    encoding/currency/address utilities.
    """

    # Providers
    HTTPProvider = HTTPProvider
    IPCProvider = IPCProvider
    EthereumTesterProvider = EthereumTesterProvider
    WebsocketProvider = WebsocketProvider

    # Managers
    RequestManager = DefaultRequestManager

    # Iban
    Iban = Iban

    # Encoding and Decoding
    toBytes = staticmethod(to_bytes)
    toInt = staticmethod(to_int)
    toHex = staticmethod(to_hex)
    toText = staticmethod(to_text)
    toJSON = staticmethod(to_json)

    # Currency Utility
    toWei = staticmethod(to_wei)
    fromWei = staticmethod(from_wei)

    # Address Utility
    isAddress = staticmethod(is_address)
    isChecksumAddress = staticmethod(is_checksum_address)
    toChecksumAddress = staticmethod(to_checksum_address)

    def __init__(self, provider=None, middlewares=None, modules=None, ens=empty):
        """
        :param provider: the provider used to talk to the chain; may be None.
        :param middlewares: optional middleware stack handed to the manager.
        :param modules: optional mapping of name -> module class; defaults to
            :func:`get_default_modules`.
        :param ens: optional ENS instance; the ``empty`` sentinel means
            "derive one from this web3 instance on access".
        """
        self.manager = self.RequestManager(self, provider, middlewares)
        if modules is None:
            modules = get_default_modules()
        # Attach each module (eth, net, ...) as an attribute on this instance.
        for module_name, module_class in modules.items():
            module_class.attach(self, module_name)
        self.ens = ens

    @property
    def middleware_onion(self):
        # Delegates to the request manager's middleware stack.
        return self.manager.middleware_onion

    @property
    def provider(self):
        return self.manager.provider

    @provider.setter
    def provider(self, provider):
        self.manager.provider = provider

    @staticmethod
    @apply_to_return_value(HexBytes)
    def keccak(primitive=None, text=None, hexstr=None):
        """
        Return the keccak-256 hash (as HexBytes) of exactly one of
        ``primitive`` (bytes/int), ``text`` or ``hexstr``.
        Raises TypeError for any other first-argument type.
        """
        if isinstance(primitive, (bytes, int, type(None))):
            input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)
            return eth_utils_keccak(input_bytes)
        raise TypeError(
            "You called keccak with first arg %r and keywords %r. You must call it with one of "
            "these approaches: keccak(text='txt'), keccak(hexstr='0x747874'), "
            "keccak(b'\\x74\\x78\\x74'), or keccak(0x747874)." % (
                primitive,
                {'text': text, 'hexstr': hexstr}
            )
        )

    @combomethod
    def solidityKeccak(cls, abi_types, values):
        """
        Executes keccak256 exactly as Solidity does.
        Takes list of abi_types as inputs -- `[uint24, int8[], bool]`
        and list of corresponding values -- `[20, [-1, 5, 0], True]`
        """
        if len(abi_types) != len(values):
            raise ValueError(
                "Length mismatch between provided abi types and values.  Got "
                "{0} types and {1} values.".format(len(abi_types), len(values))
            )
        # combomethod: ``cls`` is the class on class-access, the instance on
        # instance-access; only an instance can resolve ENS names.
        if isinstance(cls, type):
            w3 = None
        else:
            w3 = cls
        normalized_values = map_abi_data([abi_ens_resolver(w3)], abi_types, values)
        hex_string = add_0x_prefix(''.join(
            remove_0x_prefix(hex_encode_abi_type(abi_type, value))
            for abi_type, value
            in zip(abi_types, normalized_values)
        ))
        return cls.keccak(hexstr=hex_string)

    def isConnected(self):
        """Return whether the configured provider can be reached."""
        return self.provider.isConnected()

    @property
    def ens(self):
        # ``empty`` sentinel means: build a default ENS bound to this web3.
        if self._ens is empty:
            return ENS.fromWeb3(self)
        else:
            return self._ens

    @ens.setter
    def ens(self, new_ens):
        self._ens = new_ens

    @property
    def pm(self):
        if hasattr(self, '_pm'):
            return self._pm
        else:
            raise AttributeError(
                "The Package Management feature is disabled by default until "
                "its API stabilizes. To use these features, please enable them by running "
                "`w3.enable_unstable_package_management_api()` and try again."
            )

    def enable_unstable_package_management_api(self):
        """Attach the experimental ``web3.pm`` package-management module."""
        # Imported lazily so the ethpm dependency is only required when the
        # feature is explicitly enabled.
        from web3.pm import PM
        PM.attach(self, '_pm')
from abc import (
ABC,
abstractmethod,
)
import json
from typing import (
Any,
Dict,
Iterable,
List,
NewType,
Tuple,
)
from eth_utils import (
is_canonical_address,
is_checksum_address,
to_bytes,
to_canonical_address,
to_checksum_address,
to_text,
to_tuple,
)
from eth_utils.toolz import (
concat,
)
from ethpm import (
ASSETS_DIR,
Package,
)
from ethpm.typing import (
URI,
Address,
Manifest,
)
from ethpm.utils.backend import (
resolve_uri_contents,
)
from ethpm.utils.ipfs import (
is_ipfs_uri,
)
from ethpm.utils.manifest_validation import (
validate_manifest_against_schema,
validate_raw_manifest_format,
)
from ethpm.utils.uri import (
is_valid_content_addressed_github_uri,
)
from ethpm.validation import (
validate_package_name,
validate_package_version,
)
from web3 import Web3
from web3._utils.ens import (
is_ens_name,
)
from web3.exceptions import (
InvalidAddress,
ManifestValidationError,
NameNotFound,
PMError,
)
from web3.module import (
Module,
)
# Type alias for the receipt dict returned by ``w3.eth.waitForTransactionReceipt``.
TxReceipt = NewType("TxReceipt", Dict[str, Any])

# Package Management is still in alpha, and its API is likely to change, so it
# is not automatically available on a web3 instance. To use the `PM` module,
# please enable the package management API on an individual web3 instance.
#
# >>> from web3.auto import w3
# >>> w3.pm
# AttributeError: The Package Management feature is disabled by default ...
# >>> w3.enable_unstable_package_management_api()
# >>> w3.pm
# <web3.pm.PM at 0x....>
class ERCRegistry(ABC):
    """
    The ERCRegistry class is a base class for all registry implementations to inherit from. It
    defines the methods specified in `ERC 1319 <https://github.com/ethereum/EIPs/issues/1319>`__.
    All of these methods are prefixed with an underscore, since they are not intended to be
    accessed directly, but rather through the methods on ``web3.pm``. They are unlikely to change,
    but must be implemented in a `ERCRegistry` subclass in order to be compatible with the
    `PM` module. Any custom methods (eg. not defined in ERC1319) in a subclass
    should *not* be prefixed with an underscore.
    All of these methods must be implemented in any subclass in order to work with `web3.pm.PM`.
    Any implementation specific logic should be handled in a subclass.
    """

    @abstractmethod
    def __init__(self, address: Address, w3: Web3) -> None:
        """
        Initializes the class with the on-chain address of the registry, and a web3 instance
        connected to the chain where the registry can be found.
        Must set the following properties...
        * ``self.registry``: A `web3.contract` instance of the target registry.
        * ``self.address``: The address of the target registry.
        * ``self.w3``: The *web3* instance connected to the chain where the registry can be found.
        """
        pass

    #
    # Write API
    #
    @abstractmethod
    def _release(self, package_name: str, version: str, manifest_uri: str) -> bytes:
        """
        Returns the releaseId created by successfully adding a release to the registry.
        * Parameters:
            * ``package_name``: Valid package name according to the spec.
            * ``version``: Version identifier string, can conform to any versioning scheme.
            * ``manifest_uri``: URI location of a manifest which details the release contents
        """
        pass

    #
    # Read API
    #
    @abstractmethod
    def _get_package_name(self, package_id: bytes) -> str:
        """
        Returns the package name associated with the given package id, if the
        package id exists on the connected registry.
        * Parameters:
            * ``package_id``: 32 byte package identifier.
        """
        pass

    @abstractmethod
    def _get_all_package_ids(self) -> Tuple[bytes]:
        """
        Returns a tuple containing all of the package ids found on the connected registry.
        """
        pass

    @abstractmethod
    def _get_release_id(self, package_name: str, version: str) -> bytes:
        """
        Returns the 32 bytes release id associated with the given package name and version,
        if the release exists on the connected registry.
        * Parameters:
            * ``package_name``: Valid package name according to the spec.
            * ``version``: Version identifier string, can conform to any versioning scheme.
        """
        pass

    @abstractmethod
    def _get_all_release_ids(self, package_name: str) -> Tuple[bytes]:
        """
        Returns a tuple containing all of the release ids belonging to the given package name,
        if the package has releases on the connected registry.
        * Parameters:
            * ``package_name``: Valid package name according to the spec.
        """
        pass

    @abstractmethod
    def _get_release_data(self, release_id: bytes) -> Tuple[str, str, str]:
        """
        Returns a tuple containing (package_name, version, manifest_uri) for the given release id,
        if the release exists on the connected registry.
        * Parameters:
            * ``release_id``: 32 byte release identifier.
        """
        pass

    @abstractmethod
    def _generate_release_id(self, package_name: str, version: str) -> bytes:
        """
        Returns the 32 byte release identifier that *would* be associated with the given
        package name and version according to the registry's hashing mechanism.
        The release *does not* have to exist on the connected registry.
        * Parameters:
            * ``package_name``: Valid package name according to the spec.
            * ``version``: Version identifier string, can conform to any versioning scheme.
        """
        pass

    @abstractmethod
    def _num_package_ids(self) -> int:
        """
        Returns the number of packages that exist on the connected registry.
        """
        pass

    @abstractmethod
    def _num_release_ids(self, package_name: str) -> int:
        """
        Returns the number of releases found on the connected registry,
        that belong to the given package name.
        * Parameters:
            * ``package_name``: Valid package name according to the spec.
        """
        pass
class VyperReferenceRegistry(ERCRegistry):
    """
    The ``VyperReferenceRegistry`` class implements all of the methods found in ``ERCRegistry``,
    along with some custom methods included in the `implementation
    <https://github.com/ethpm/py-ethpm/blob/master/ethpm/assets/vyper_registry/registry.vy>`__.
    """

    def __init__(self, address: Address, w3: Web3) -> None:
        # todo: validate runtime bytecode
        abi = get_vyper_registry_manifest()["contract_types"]["registry"]["abi"]
        self.registry = w3.eth.contract(address=address, abi=abi)
        # The address attribute is stored in checksummed form.
        self.address = to_checksum_address(address)
        self.w3 = w3

    @classmethod
    def deploy_new_instance(cls, w3: Web3) -> "VyperReferenceRegistry":
        """
        Returns a new instance of ``VyperReferenceRegistry`` representing a freshly deployed
        instance on the given ``web3`` instance of the Vyper Reference Registry implementation.
        """
        manifest = get_vyper_registry_manifest()
        registry_package = Package(manifest, w3)
        registry_factory = registry_package.get_contract_factory("registry")
        tx_hash = registry_factory.constructor().transact()
        # Blocks until the deployment transaction is mined.
        tx_receipt = w3.eth.waitForTransactionReceipt(tx_hash)
        registry_address = to_canonical_address(tx_receipt.contractAddress)
        return cls(registry_address, w3)

    def _release(self, package_name: str, version: str, manifest_uri: str) -> bytes:
        # Length limits enforced client-side; presumably mirror fixed-size
        # types in the Vyper contract -- confirm against the registry source.
        if len(package_name) > 32 or len(version) > 32:
            raise PMError(
                "Vyper registry only works with package names and versions less than 32 chars."
            )
        if len(manifest_uri) > 1000:
            raise PMError(
                "Vyper registry only works with manifest URIs shorter than 1000 chars."
            )
        args = process_vyper_args(package_name, version, manifest_uri)
        tx_hash = self.registry.functions.release(*args).transact()
        # Wait for the release tx to be mined before looking up its id.
        self.w3.eth.waitForTransactionReceipt(tx_hash)
        return self._get_release_id(package_name, version)

    def _get_package_name(self, package_id: bytes) -> str:
        package_name = self.registry.functions.getPackageName(package_id).call()
        # Strip the zero-padding appended to bytes32 values before decoding.
        return to_text(package_name.rstrip(b"\x00"))

    @to_tuple
    def _get_all_package_ids(self) -> Iterable[Tuple[bytes]]:
        num_packages = self._num_package_ids()
        # NOTE(review): the cursor advances by 4 while each call fetches a page
        # of 5 ids, so consecutive pages overlap by one entry -- looks like an
        # off-by-one; confirm against the registry contract before changing.
        for index in range(0, num_packages, 4):
            package_ids = self.registry.functions.getAllPackageIds(index, 5).call()
            for package_id in package_ids:
                # All-zero ids are empty slots in the returned page.
                if package_id != b"\x00" * 32:
                    yield package_id

    def _get_release_id(self, package_name: str, version: str) -> bytes:
        actual_args = process_vyper_args(package_name, version)
        return self.registry.functions.getReleaseId(*actual_args).call()

    @to_tuple
    def _get_all_release_ids(self, package_name: str) -> Iterable[Tuple[bytes]]:
        actual_name = process_vyper_args(package_name)
        num_releases = self.registry.functions.numReleaseIds(*actual_name).call()
        # NOTE(review): same stride-4 / page-size-5 pattern as
        # _get_all_package_ids above; see the note there.
        for index in range(0, num_releases, 4):
            release_ids = self.registry.functions.getAllReleaseIds(
                *actual_name, index, 5
            ).call()
            for release_id in release_ids:
                if release_id != b"\x00" * 32:
                    yield release_id

    @to_tuple
    def _get_release_data(self, release_id: bytes) -> Iterable[Tuple[str]]:
        release_data = self.registry.functions.getReleaseData(release_id).call()
        for data in release_data:
            # Skip all-zero fields; decode the rest after removing padding.
            if data != b"\x00" * 32:
                yield to_text(data.rstrip(b"\x00"))

    def _generate_release_id(self, package_name: str, version: str) -> bytes:
        args = process_vyper_args(package_name, version)
        return self.registry.functions.generateReleaseId(*args).call()

    def _num_package_ids(self) -> int:
        return self.registry.functions.numPackageIds().call()

    def _num_release_ids(self, package_name: str) -> int:
        args = process_vyper_args(package_name)
        return self.registry.functions.numReleaseIds(*args).call()

    def owner(self) -> Address:
        """
        Returns the address of the ``owner`` of this registry instance. Only the ``owner``
        is allowed to add releases to the Vyper Reference Registry implementation.
        """
        return self.registry.functions.owner().call()

    def transfer_owner(self, new_owner: Address) -> TxReceipt:
        """
        Transfers ownership of this registry instance to the given ``new_owner``. Only the
        ``owner`` is allowed to transfer ownership.
        * Parameters:
            * ``new_owner``: The address of the new owner.
        """
        tx_hash = self.registry.functions.transferOwner(new_owner).transact()
        return self.w3.eth.waitForTransactionReceipt(tx_hash)
class SolidityReferenceRegistry(ERCRegistry):
    """
    This class represents an instance of the `Solidity Reference Registry implementation
    <https://github.com/ethpm/py-ethpm/tree/master/ethpm/assets/registry>`__.
    To use this subclass, you must manually set an instance of this class to the
    ``registry`` attribute on ``web3.pm``.
    """

    def __init__(self, address: Address, w3: Web3) -> None:
        abi = get_solidity_registry_manifest()["contract_types"]["PackageRegistry"][
            "abi"
        ]
        self.registry = w3.eth.contract(address=address, abi=abi)
        # The address attribute is stored in checksummed form.
        self.address = to_checksum_address(address)
        self.w3 = w3

    def _release(self, package_name: str, version: str, manifest_uri: str) -> bytes:
        tx_hash = self.registry.functions.release(
            package_name, version, manifest_uri
        ).transact()
        # Wait for the release tx to be mined before looking up its id.
        self.w3.eth.waitForTransactionReceipt(tx_hash)
        return self._get_release_id(package_name, version)

    def _get_package_name(self, package_id: bytes) -> str:
        # Unlike the Vyper registry, names come back as str -- no decoding needed.
        package_name = self.registry.functions.getPackageName(package_id).call()
        return package_name

    @to_tuple
    def _get_all_package_ids(self) -> Iterable[Tuple[bytes]]:
        num_packages = self._num_package_ids()
        # Logic here b/c Solidity Reference Registry implementation returns ids in reverse order
        package_ids = [
            self.registry.functions.getAllPackageIds(index, (index + 4)).call()[0]
            for index in range(0, num_packages, 4)
        ]
        # Reverse each page, then flatten, to restore forward order.
        for package_id in concat([x[::-1] for x in package_ids]):
            yield package_id

    def _get_release_id(self, package_name: str, version: str) -> bytes:
        return self.registry.functions.getReleaseId(package_name, version).call()

    @to_tuple
    def _get_all_release_ids(self, package_name: str) -> Iterable[Tuple[bytes]]:
        num_releases = self._num_release_ids(package_name)
        # Logic here b/c Solidity Reference Registry implementation returns ids in reverse order
        release_ids = [
            self.registry.functions.getAllReleaseIds(
                package_name, index, (index + 4)
            ).call()[0]
            for index in range(0, num_releases, 4)
        ]
        # Reverse each page, then flatten, to restore forward order.
        for release_id in concat([x[::-1] for x in release_ids]):
            yield release_id

    @to_tuple
    def _get_release_data(self, release_id: bytes) -> Iterable[Tuple[str]]:
        release_data = self.registry.functions.getReleaseData(release_id).call()
        for data in release_data:
            yield data

    def _generate_release_id(self, package_name: str, version: str) -> bytes:
        return self.registry.functions.generateReleaseId(package_name, version).call()

    def _num_package_ids(self) -> int:
        return self.registry.functions.numPackageIds().call()

    def _num_release_ids(self, package_name: str) -> int:
        return self.registry.functions.numReleaseIds(package_name).call()
class PM(Module):
    """
    By default, the PM module uses the Vyper Reference Registry `implementation
    <https://github.com/ethpm/py-ethpm/blob/master/ethpm/assets/vyper_registry/registry.vy>`__.
    However, it will work with any subclass of ``ERCRegistry``, tailored to a particular
    implementation of `ERC1319 <https://github.com/ethereum/EIPs/issues/1319>`__, set as
    its ``registry`` attribute.
    """

    def get_package_from_manifest(self, manifest: Manifest) -> Package:
        """
        Returns a `Package <https://github.com/ethpm/py-ethpm/blob/master/ethpm/package.py>`__
        instance built with the given manifest.
        * Parameters:
            * ``manifest``: A dict representing a valid manifest
        """
        return Package(manifest, self.web3)

    def get_package_from_uri(self, manifest_uri: URI) -> Package:
        """
        Returns a `Package <https://github.com/ethpm/py-ethpm/blob/master/ethpm/package.py>`__
        instance built with the Manifest stored at the URI.
        If you want to use a specific IPFS backend, set ``ETHPM_IPFS_BACKEND_CLASS``
        to your desired backend. Defaults to Infura IPFS backend.
        * Parameters:
            * ``uri``: Must be a valid content-addressed URI
        """
        return Package.from_uri(manifest_uri, self.web3)

    def set_registry(self, address: Address) -> None:
        """
        Sets the current registry used in ``web3.pm`` functions that read/write to an on-chain
        registry. This method accepts checksummed/canonical addresses or ENS names. Addresses
        must point to an instance of the Vyper Reference Registry implementation.
        If you want to use a different registry implementation with ``web3.pm``, manually
        set the ``web3.pm.registry`` attribute to any subclass of ``ERCRegistry``.
        To use an ENS domain as the address, make sure a valid ENS instance is set as
        ``web3.ens``.
        * Parameters:
            * ``address``: Address of on-chain Vyper Reference Registry.
        """
        if is_canonical_address(address) or is_checksum_address(address):
            canonical_address = to_canonical_address(address)
            self.registry = VyperReferenceRegistry(canonical_address, self.web3)
        elif is_ens_name(address):
            # ENS path: resolve the name to an address first.
            self._validate_set_ens()
            addr_lookup = self.web3.ens.address(address)
            if not addr_lookup:
                raise NameNotFound(
                    "No address found after ENS lookup for name: {0}.".format(address)
                )
            self.registry = VyperReferenceRegistry(
                to_canonical_address(addr_lookup), self.web3
            )
        else:
            raise PMError(
                "Expected a canonical/checksummed address or ENS name for the address, "
                "instead received {0}.".format(type(address))
            )

    def deploy_and_set_registry(self) -> Address:
        """
        Returns the address of a freshly deployed instance of the `vyper registry
        <https://github.com/ethpm/py-ethpm/blob/master/ethpm/assets/vyper_registry/registry.vy>`__,
        and sets the newly deployed registry as the active registry on ``web3.pm.registry``.
        To tie your registry to an ENS name, use web3's ENS module, ie.
        .. code-block:: python
            w3.ens.setup_address(ens_name, w3.pm.registry.address)
        """
        self.registry = VyperReferenceRegistry.deploy_new_instance(self.web3)
        return to_checksum_address(self.registry.address)

    def release_package(
        self, package_name: str, version: str, manifest_uri: str
    ) -> bytes:
        """
        Returns the release id generated by releasing a package on the current registry.
        Requires ``web3.PM`` to have a registry set. Requires ``web3.eth.defaultAccount``
        to be the registry owner.
        * Parameters:
            * ``package_name``: Must be a valid package name, matching the given manifest.
            * ``version``: Must be a valid package version, matching the given manifest.
            * ``manifest_uri``: Must be a valid content-addressed URI. Currently, only IPFS
              and Github content-addressed URIs are supported.
        """
        validate_is_supported_manifest_uri(manifest_uri)
        # Fetch and validate the manifest before touching the chain, so a bad
        # manifest never results in an on-chain release.
        raw_manifest = to_text(resolve_uri_contents(manifest_uri))
        validate_raw_manifest_format(raw_manifest)
        manifest = json.loads(raw_manifest)
        validate_manifest_against_schema(manifest)
        if package_name != manifest['package_name']:
            raise ManifestValidationError(
                f"Provided package name: {package_name} does not match the package name "
                f"found in the manifest: {manifest['package_name']}."
            )
        if version != manifest['version']:
            raise ManifestValidationError(
                f"Provided package version: {version} does not match the package version "
                f"found in the manifest: {manifest['version']}."
            )
        self._validate_set_registry()
        return self.registry._release(package_name, version, manifest_uri)

    @to_tuple
    def get_all_package_names(self) -> Iterable[str]:
        """
        Returns a tuple containing all the package names available on the current registry.
        """
        self._validate_set_registry()
        package_ids = self.registry._get_all_package_ids()
        for package_id in package_ids:
            yield self.registry._get_package_name(package_id)

    def get_package_count(self) -> int:
        """
        Returns the number of packages available on the current registry.
        """
        self._validate_set_registry()
        return self.registry._num_package_ids()

    def get_release_count(self, package_name: str) -> int:
        """
        Returns the number of releases of the given package name available on the current registry.
        """
        validate_package_name(package_name)
        self._validate_set_registry()
        return self.registry._num_release_ids(package_name)

    def get_release_id(self, package_name: str, version: str) -> bytes:
        """
        Returns the 32 byte identifier of a release for the given package name and version,
        if they are available on the current registry.
        """
        validate_package_name(package_name)
        validate_package_version(version)
        self._validate_set_registry()
        return self.registry._get_release_id(package_name, version)

    @to_tuple
    def get_all_package_releases(self, package_name: str) -> Iterable[Tuple[str, str]]:
        """
        Returns a tuple of release data (version, manifest_uri) for every release of the
        given package name available on the current registry.
        """
        validate_package_name(package_name)
        self._validate_set_registry()
        release_ids = self.registry._get_all_release_ids(package_name)
        for release_id in release_ids:
            # Package name is discarded -- it is the one passed in.
            _, version, manifest_uri = self.registry._get_release_data(release_id)
            yield (version, manifest_uri)

    def get_release_id_data(self, release_id: bytes) -> Tuple[str, str, str]:
        """
        Returns ``(package_name, version, manifest_uri)`` associated with the given
        release id, *if* it is available on the current registry.
        * Parameters:
            * ``release_id``: 32 byte release identifier
        """
        self._validate_set_registry()
        return self.registry._get_release_data(release_id)

    def get_release_data(self, package_name: str, version: str) -> Tuple[str, str, str]:
        """
        Returns ``(package_name, version, manifest_uri)`` associated with the given
        package name and version, *if* they are published to the currently set registry.
        * Parameters:
            * ``name``: Must be a valid package name.
            * ``version``: Must be a valid package version.
        """
        validate_package_name(package_name)
        validate_package_version(version)
        self._validate_set_registry()
        release_id = self.registry._get_release_id(package_name, version)
        return self.get_release_id_data(release_id)

    def get_package(self, package_name: str, version: str) -> Package:
        """
        Returns a ``Package`` instance, generated by the ``manifest_uri`` associated with the
        given package name and version, if they are published to the currently set registry.
        * Parameters:
            * ``name``: Must be a valid package name.
            * ``version``: Must be a valid package version.
        """
        validate_package_name(package_name)
        validate_package_version(version)
        self._validate_set_registry()
        _, _, release_uri = self.get_release_data(package_name, version)
        return self.get_package_from_uri(release_uri)

    def _validate_set_registry(self) -> None:
        # Raise a helpful error if no registry has been set, or if the set
        # registry is not an ERCRegistry subclass instance.
        try:
            self.registry
        except AttributeError:
            raise PMError(
                "web3.pm does not have a set registry. "
                "Please set registry with either: "
                "web3.pm.set_registry(address) or "
                "web3.pm.deploy_and_set_registry()"
            )
        if not isinstance(self.registry, ERCRegistry):
            raise PMError(
                "web3.pm requires an instance of a subclass of ERCRegistry "
                "to be set as the web3.pm.registry attribute. Instead found: "
                f"{type(self.registry)}."
            )

    def _validate_set_ens(self) -> None:
        # ENS lookups require both a live web3 instance and a configured
        # web3.ens module.
        if not self.web3:
            raise InvalidAddress(
                "Could not look up ENS address because no web3 " "connection available"
            )
        elif not self.web3.ens:
            raise InvalidAddress(
                "Could not look up ENS address because web3.ens is " "set to None"
            )
def get_vyper_registry_manifest() -> Dict[str, Any]:
    """Load the bundled manifest for the Vyper reference registry."""
    manifest_path = ASSETS_DIR / "vyper_registry" / "0.1.0.json"
    return json.loads(manifest_path.read_text())
def get_solidity_registry_manifest() -> Dict[str, Any]:
    """Load the bundled manifest for the Solidity reference registry."""
    manifest_path = ASSETS_DIR / "registry" / "1.0.0.json"
    return json.loads(manifest_path.read_text())
def validate_is_supported_manifest_uri(uri):
    """Raise ManifestValidationError unless *uri* is an IPFS or a
    content-addressed Github URI."""
    if is_ipfs_uri(uri) or is_valid_content_addressed_github_uri(uri):
        return
    raise ManifestValidationError(
        f"URI: {uri} is not a valid content-addressed URI. "
        "Currently only IPFS and Github content-addressed URIs are supported."
    )
@to_tuple
def process_vyper_args(*args: str) -> Iterable[bytes]:
    """
    Encode each string argument to UTF-8 bytes, returned together as a tuple
    (via ``@to_tuple``), for use as Vyper registry call arguments.

    Note: the annotation is ``str`` (not ``List[str]``) because ``*args``
    annotates the type of each individual positional argument.
    """
    for arg in args:
        yield to_bytes(text=arg)
import functools
import re
from eth_utils import (
is_string,
to_checksum_address,
)
from web3._utils.validation import (
validate_address,
)
def pad_left_hex(value, num_bytes):
    """Left-pad *value* with '0' characters up to ``num_bytes`` bytes
    (two hex characters per byte)."""
    target_width = 2 * num_bytes
    return value.rjust(target_width, '0')
def iso13616Prepare(iban):
    """
    Prepare an IBAN for mod 97 computation by moving the first
    4 chars to the end and transforming the letters to numbers
    (A = 10, B = 11, ..., Z = 35), as specified in ISO13616.
    @method iso13616Prepare
    @param {String} iban the IBAN
    @returns {String} the prepared IBAN
    """
    code_a, code_z = ord("A"), ord("Z")
    rotated = iban.upper()
    rotated = rotated[4:] + rotated[:4]
    pieces = []
    for ch in rotated:
        code = ord(ch)
        if code_a <= code <= code_z:
            # Letter: map A..Z onto 10..35.
            pieces.append(str(code - code_a + 10))
        else:
            # Digit (or other char): passed through unchanged.
            pieces.append(ch)
    return "".join(pieces)
def mod9710(iban):
    """
    Calculates the MOD 97 10 of the passed IBAN as specified in ISO7064.
    @method mod9710
    @param {String} iban
    @returns {Number}
    """
    remainder = iban
    # Reduce the (potentially huge) digit string nine digits at a time,
    # replacing each chunk with its remainder mod 97.
    while len(remainder) > 2:
        head = remainder[:9]
        remainder = str(int(head) % 97) + remainder[len(head):]
    return int(remainder) % 97
def baseN(num, b, numerals="0123456789abcdefghijklmnopqrstuvwxyz"):
    """
    Recursively convert the non-negative integer *num* into a string in
    base *b*, using *numerals* as the digit alphabet.
    """
    if num == 0:
        return numerals[0]
    higher_digits = baseN(num // b, b, numerals).lstrip(numerals[0])
    return higher_digits + numerals[num % b]
class IsValid:
    """
    Should be called to check if iban is correct
    Note: This is implemented as a descriptor so that it can be called as
    either an instance method.
    @method isValid
    @returns {Boolean} true if it is, otherwise false
    """

    def __get__(self, instance, owner):
        # Class-level access yields the raw validator; instance access
        # pre-binds the instance's IBAN string.
        if instance is None:
            return self.validate
        return functools.partial(self.validate, instance._iban)

    @staticmethod
    def validate(iban_address):
        if not is_string(iban_address):
            return False
        pattern_ok = re.match(
            r"^XE[0-9]{2}(ETH[0-9A-Z]{13}|[0-9A-Z]{30,31})$", iban_address
        )
        # Both the shape and the mod-97-10 check digit must hold.
        return bool(pattern_ok) and mod9710(iso13616Prepare(iban_address)) == 1
class Iban:
    """
    Wraps an IBAN/ICAP address string and offers conversions to and from
    hex Ethereum addresses, plus accessors for its component fields.
    """

    def __init__(self, iban):
        # Raw IBAN string; validity is checked lazily via ``isValid``.
        self._iban = iban

    @staticmethod
    def fromAddress(address):
        """
        This method should be used to create
        an iban object from ethereum address
        @method fromAddress
        @param {String} address
        @return {Iban} the IBAN object
        """
        validate_address(address)
        address_as_integer = int(address, 16)
        address_as_base36 = baseN(address_as_integer, 36)
        # Pad to 30 chars (15 "bytes"), the fixed width of a direct IBAN body.
        padded = pad_left_hex(address_as_base36, 15)
        return Iban.fromBban(padded.upper())

    @staticmethod
    def fromBban(bban):
        """
        Convert the passed BBAN to an IBAN for this country specification.
        Please note that <i>"generation of the IBAN shall be the exclusive
        responsibility of the bank/branch servicing the account"</i>.
        This method implements the preferred algorithm described in
        http://en.wikipedia.org/wiki/International_Bank_Account_Number#Generating_IBAN_check_digits
        @method fromBban
        @param {String} bban the BBAN to convert to IBAN
        @returns {Iban} the IBAN object
        """
        countryCode = "XE"
        remainder = mod9710(iso13616Prepare(countryCode + "00" + bban))
        checkDigit = ("0" + str(98 - remainder))[-2:]
        return Iban(countryCode + checkDigit + bban)

    @staticmethod
    def createIndirect(options):
        """
        Should be used to create IBAN object for given institution and identifier
        @method createIndirect
        @param {Object} options, required options are "institution" and "identifier"
        @return {Iban} the IBAN object
        """
        return Iban.fromBban("ETH" + options["institution"] + options["identifier"])

    # Descriptor: callable both as ``Iban.isValid(s)`` and ``iban.isValid()``.
    isValid = IsValid()

    def isDirect(self):
        """
        Should be called to check if iban number is direct
        @method isDirect
        @returns {Boolean} true if it is, otherwise false
        """
        return len(self._iban) in [34, 35]

    def isIndirect(self):
        """
        Should be called to check if iban number is indirect
        @method isIndirect
        @returns {Boolean} true if it is, otherwise false
        """
        return len(self._iban) == 20

    def checksum(self):
        """
        Should be called to get iban checksum
        Uses the mod-97-10 checksumming protocol (ISO/IEC 7064:2003)
        @method checksum
        @returns {String} checksum
        """
        return self._iban[2:4]

    def institution(self):
        """
        Should be called to get institution identifier
        eg. XREG
        @method institution
        @returns {String} institution identifier
        """
        if self.isIndirect():
            return self._iban[7:11]
        else:
            return ""

    def client(self):
        """
        Should be called to get client identifier within institution
        eg. GAVOFYORK
        @method client
        @returns {String} client identifier
        """
        if self.isIndirect():
            return self._iban[11:]
        else:
            return ""

    def address(self):
        """
        Should be called to get client direct address
        @method address
        @returns {String} client direct address
        """
        if self.isDirect():
            base36 = self._iban[4:]
            asInt = int(base36, 36)
            return to_checksum_address(pad_left_hex(baseN(asInt, 16), 20))
        return ""

    def toString(self):
        """Return the raw IBAN string."""
        return self._iban
from collections import (
Hashable,
Mapping,
MutableMapping,
OrderedDict,
Sequence,
)
from eth_utils import (
is_integer,
)
from web3._utils.formatters import (
recursive_map,
)
# Hashable must be immutable:
# "the implementation of hashable collections requires that a key's hash value is immutable"
# https://docs.python.org/3/reference/datamodel.html#object.__hash__
class ReadableAttributeDict(Mapping):
    """
    The read attributes for the AttributeDict types
    """

    def __init__(self, dictionary, *args, **kwargs):
        combined = dict(dictionary)
        combined.update(dict(*args, **kwargs))
        # Storing in __dict__ makes every key readable as an attribute.
        self.__dict__ = combined

    def __getitem__(self, key):
        return self.__dict__[key]

    def __iter__(self):
        return iter(self.__dict__)

    def __len__(self):
        return len(self.__dict__)

    def __repr__(self):
        return "{0}({1!r})".format(self.__class__.__name__, self.__dict__)

    def _repr_pretty_(self, builder, cycle):
        """
        Custom pretty output for the IPython console
        """
        builder.text(self.__class__.__name__ + "(")
        if cycle:
            builder.text("<cycle>")
        else:
            builder.pretty(self.__dict__)
        builder.text(")")

    @classmethod
    def _apply_if_mapping(cls, value):
        # Wrap mappings in this class; pass everything else through untouched.
        return cls(value) if isinstance(value, Mapping) else value

    @classmethod
    def recursive(cls, value):
        return recursive_map(cls._apply_if_mapping, value)
class MutableAttributeDict(MutableMapping, ReadableAttributeDict):
    """Mutable variant of ReadableAttributeDict: item assignment and
    deletion write straight through to the attribute namespace."""

    def __setitem__(self, key, value):
        self.__dict__[key] = value

    def __delitem__(self, key):
        del self.__dict__[key]
class AttributeDict(ReadableAttributeDict, Hashable):
    """
    This provides superficial immutability, someone could hack around it
    """

    def __setattr__(self, attr, val):
        # Only the initial __dict__ assignment from __init__ is permitted.
        if attr != '__dict__':
            raise TypeError('This data is immutable -- create a copy instead of modifying')
        super().__setattr__(attr, val)

    def __delattr__(self, key):
        raise TypeError('This data is immutable -- create a copy instead of modifying')

    def __hash__(self):
        # Hash over the sorted items so equal dicts hash equally.
        return hash(tuple(sorted(self.items())))

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return False
        return self.__dict__ == dict(other)
class NamedElementOnion(Mapping):
    """
    Add layers to an onion-shaped structure. Optionally, inject to a specific layer.
    This structure is iterable, where the outermost layer is first, and innermost is last.
    """
    def __init__(self, init_elements, valid_element=callable):
        # ``_queue`` stores layers innermost-first; ``__iter__`` reverses it so
        # the outermost layer is yielded first.
        self._queue = OrderedDict()
        for element in reversed(init_elements):
            # ``init_elements`` may mix bare elements with (element, name)
            # pairs; ``valid_element`` (default: ``callable``) distinguishes.
            if valid_element(element):
                self.add(element)
            else:
                self.add(*element)
    def add(self, element, name=None):
        """Add ``element`` as the new outermost layer, keyed by ``name``.

        An un-named element is keyed by itself.
        """
        if name is None:
            name = element
        if name in self._queue:
            if name is element:
                raise ValueError("You can't add the same un-named instance twice")
            else:
                raise ValueError("You can't add the same name again, use replace instead")
        self._queue[name] = element
    def inject(self, element, name=None, layer=None):
        """
        Inject a named element to an arbitrary layer in the onion.
        The current implementation only supports insertion at the innermost layer,
        or at the outermost layer. Note that inserting to the outermost is equivalent
        to calling :meth:`add` .
        """
        if not is_integer(layer):
            raise TypeError("The layer for insertion must be an int.")
        elif layer != 0 and layer != len(self._queue):
            raise NotImplementedError(
                "You can only insert to the beginning or end of a %s, currently. "
                "You tried to insert to %d, but only 0 and %d are permitted. " % (
                    type(self),
                    layer,
                    len(self._queue),
                )
            )
        # ``add`` appends at the end of the queue; for layer 0 the new entry is
        # then rotated to the front (the other end of the onion).
        self.add(element, name=name)
        if layer == 0:
            if name is None:
                name = element
            self._queue.move_to_end(name, last=False)
        elif layer == len(self._queue):
            return
        else:
            raise AssertionError("Impossible to reach: earlier validation raises an error")
    def clear(self):
        # Drop every layer.
        self._queue.clear()
    def replace(self, old, new):
        """Swap the element registered under ``old`` for ``new``.

        Returns the element that was replaced.
        """
        if old not in self._queue:
            raise ValueError("You can't replace unless one already exists, use add instead")
        to_be_replaced = self._queue[old]
        if to_be_replaced is old:
            # re-insert with new name in old slot
            self._replace_with_new_name(old, new)
        else:
            self._queue[old] = new
        return to_be_replaced
    def remove(self, old):
        """Remove the element registered under ``old``."""
        if old not in self._queue:
            raise ValueError("You can only remove something that has been added")
        del self._queue[old]
    def _replace_with_new_name(self, old, new):
        # Replace a self-keyed element with ``new`` while preserving its
        # position: append ``new``, rotate every key that followed ``old`` to
        # the end, then drop ``old``.
        self._queue[new] = new
        found_old = False
        for key in list(self._queue.keys()):
            if not found_old:
                if key == old:
                    found_old = True
                continue
            elif key != new:
                self._queue.move_to_end(key)
        del self._queue[old]
    def __iter__(self):
        # Outermost layer first: reverse the innermost-first queue.  The
        # values view is not a Sequence, so materialize before reversing.
        elements = self._queue.values()
        if not isinstance(elements, Sequence):
            elements = list(elements)
        return iter(reversed(elements))
    def __add__(self, other):
        if not isinstance(other, NamedElementOnion):
            raise NotImplementedError("You can only combine with another NamedElementOnion")
        combined = self._queue.copy()
        combined.update(other._queue)
        return NamedElementOnion(combined.items())
    def __contains__(self, element):
        return element in self._queue
    def __getitem__(self, element):
        return self._queue[element]
    def __len__(self):
        return len(self._queue)
    def __reversed__(self):
        # Innermost layer first (queue order).
        elements = self._queue.values()
        if not isinstance(elements, Sequence):
            elements = list(elements)
        return iter(elements)
from web3._utils.filters import (
ShhFilter,
)
from web3.module import (
Module,
)
class Shh(Module):
    """
    Web3 module exposing the Whisper (``shh_*``) JSON-RPC namespace.
    Every method is a thin pass-through to ``web3.manager.request_blocking``
    with the corresponding ``shh_*`` RPC method name.
    """
    @property
    def version(self):
        return self.web3.manager.request_blocking("shh_version", [])
    @property
    def info(self):
        return self.web3.manager.request_blocking("shh_info", [])
    # -- node configuration --
    def setMaxMessageSize(self, size):
        return self.web3.manager.request_blocking("shh_setMaxMessageSize", [size])
    def setMinPoW(self, min_pow):
        return self.web3.manager.request_blocking("shh_setMinPoW", [min_pow])
    def markTrustedPeer(self, enode):
        return self.web3.manager.request_blocking("shh_markTrustedPeer", [enode])
    # -- asymmetric key management --
    def newKeyPair(self):
        return self.web3.manager.request_blocking("shh_newKeyPair", [])
    def addPrivateKey(self, key):
        return self.web3.manager.request_blocking("shh_addPrivateKey", [key])
    def deleteKeyPair(self, id):
        return self.web3.manager.request_blocking("shh_deleteKeyPair", [id])
    def hasKeyPair(self, id):
        return self.web3.manager.request_blocking("shh_hasKeyPair", [id])
    def getPublicKey(self, id):
        return self.web3.manager.request_blocking("shh_getPublicKey", [id])
    def getPrivateKey(self, id):
        return self.web3.manager.request_blocking("shh_getPrivateKey", [id])
    # -- symmetric key management --
    def newSymKey(self):
        return self.web3.manager.request_blocking("shh_newSymKey", [])
    def addSymKey(self, key):
        return self.web3.manager.request_blocking("shh_addSymKey", [key])
    def generateSymKeyFromPassword(self, password):
        return self.web3.manager.request_blocking("shh_generateSymKeyFromPassword", [password])
    def hasSymKey(self, id):
        return self.web3.manager.request_blocking("shh_hasSymKey", [id])
    def getSymKey(self, id):
        return self.web3.manager.request_blocking("shh_getSymKey", [id])
    def deleteSymKey(self, id):
        return self.web3.manager.request_blocking("shh_deleteSymKey", [id])
    # -- messaging --
    def post(self, message):
        """Publish a whisper message; ``message`` must contain a ``payload``."""
        if message and ("payload" in message):
            return self.web3.manager.request_blocking("shh_post", [message])
        else:
            raise ValueError(
                "message cannot be None or does not contain field 'payload'"
            )
    def newMessageFilter(self, criteria, poll_interval=None):
        # Wrap the node-side filter id in a ShhFilter for client-side polling.
        filter_id = self.web3.manager.request_blocking("shh_newMessageFilter", [criteria])
        return ShhFilter(self.web3, filter_id, poll_interval=poll_interval)
    def deleteMessageFilter(self, filter_id):
        return self.web3.manager.request_blocking("shh_deleteMessageFilter", [filter_id])
    def getMessages(self, filter_id):
        return self.web3.manager.request_blocking("shh_getFilterMessages", [filter_id])
import logging
import uuid
from web3._utils.decorators import (
deprecated_for,
)
from web3._utils.threads import (
spawn,
)
from web3.datastructures import (
NamedElementOnion,
)
from web3.middleware import (
abi_middleware,
attrdict_middleware,
gas_price_strategy_middleware,
name_to_address_middleware,
normalize_errors_middleware,
pythonic_middleware,
request_parameter_normalizer,
validation_middleware,
)
from web3.providers import (
AutoProvider,
)
class RequestManager:
    """
    Routes JSON-RPC calls from web3 modules through the middleware stack to
    the configured provider, and unwraps the provider's responses.
    """
    logger = logging.getLogger("web3.RequestManager")
    def __init__(self, web3, provider=None, middlewares=None):
        self.web3 = web3
        self.pending_requests = {}
        if middlewares is None:
            middlewares = self.default_middlewares(web3)
        self.middleware_onion = NamedElementOnion(middlewares)
        if provider is None:
            # No provider supplied: try to auto-detect one from the environment.
            self.provider = AutoProvider()
        else:
            self.provider = provider
    # Class-level defaults; instances overwrite these in __init__ and via the
    # ``provider`` property setter.
    web3 = None
    _provider = None
    @property
    def provider(self):
        return self._provider
    @provider.setter
    def provider(self, provider):
        self._provider = provider
    @staticmethod
    def default_middlewares(web3):
        """
        List the default middlewares for the request manager.
        Leaving ens unspecified will prevent the middleware from resolving names.
        """
        # NOTE: ordering is significant -- NamedElementOnion treats the first
        # entry as the outermost layer.
        return [
            (request_parameter_normalizer, 'request_param_normalizer'),
            (gas_price_strategy_middleware, 'gas_price_strategy'),
            (name_to_address_middleware(web3), 'name_to_address'),
            (attrdict_middleware, 'attrdict'),
            (pythonic_middleware, 'pythonic'),
            (normalize_errors_middleware, 'normalize_errors'),
            (validation_middleware, 'validation'),
            (abi_middleware, 'abi'),
        ]
    #
    # Provider requests and response
    #
    def _make_request(self, method, params):
        # Build the provider request function with the current middleware
        # stack applied, then invoke it synchronously.
        request_func = self.provider.request_func(
            self.web3,
            tuple(self.middleware_onion))
        self.logger.debug("Making request. Method: %s", method)
        return request_func(method, params)
    async def _coro_make_request(self, method, params):
        # Async variant of ``_make_request``.
        request_func = self.provider.request_func(
            self.web3,
            tuple(self.middleware_onion))
        self.logger.debug("Making request. Method: %s", method)
        return await request_func(method, params)
    def request_blocking(self, method, params):
        """
        Make a synchronous request using the provider
        """
        response = self._make_request(method, params)
        if "error" in response:
            raise ValueError(response["error"])
        return response['result']
    async def coro_request(self, method, params):
        """
        Coroutine for making a request using the provider
        """
        response = await self._coro_make_request(method, params)
        if "error" in response:
            raise ValueError(response["error"])
        return response['result']
    @deprecated_for("coro_request")
    def request_async(self, raw_method, raw_params):
        # Deprecated: run request_blocking on a background thread; the result
        # is retrieved later via ``receive_blocking`` with the returned id.
        request_id = uuid.uuid4()
        self.pending_requests[request_id] = spawn(
            self.request_blocking,
            raw_method=raw_method,
            raw_params=raw_params,
        )
        return request_id
    def receive_blocking(self, request_id, timeout=None):
        """Wait for and unwrap the result of a ``request_async`` call."""
        try:
            request = self.pending_requests.pop(request_id)
        except KeyError:
            raise KeyError("Request for id:{0} not found".format(request_id))
        else:
            response = request.get(timeout=timeout)
            if "error" in response:
                raise ValueError(response["error"])
            return response['result']
    def receive_async(self, request_id, *args, **kwargs):
        raise NotImplementedError("Callback pattern not implemented")
import functools
from eth_utils import (
to_tuple,
)
from eth_utils.toolz import (
identity,
pipe,
)
def _munger_star_apply(fn):
@functools.wraps(fn)
def inner(args):
return fn(*args)
return inner
def get_default_formatters(*args, **kwargs):
    # Fallback formatter lookup: identity request and response formatters.
    return ([identity], [identity],)
def default_munger(module, *args, **kwargs):
    """Root munger used when a Method defines none; accepts no arguments."""
    if args or kwargs:
        raise TypeError("Parameters passed to method without parameter mungers defined.")
    return ()
class Method:
    """Method object for web3 module methods.

    Calls to a Method go through these steps:

    1. input munging -- normalization, parameter checking, early parameter
       formatting; any input processing that must happen before the json_rpc
       method string is selected.

       The first (root) munger should reflect the desired api function
       signature, taking the module as its first argument, e.g.::

           def getBalance_root_munger(module, account, block_identifier=None):
               if block_identifier is None:
                   block_identifier = DEFAULT_BLOCK
               return module, [account, block_identifier]

       All mungers should return an argument list.  If no munger is provided,
       a default munger expecting no method arguments is used.

    2. method selection -- ``json_rpc_method`` may be a method string or a
       function returning one; a callable receives the munged inputs.

    3. formatter lookup -- ``formatter_lookup_fn`` maps the method string to
       a 2-tuple ``([request_formatters], [response_formatters])``.

    4. the request is made via the module's ``retrieve_caller_fn`` and the
       response formatters are applied to the output.
    """
    def __init__(
            self,
            json_rpc_method=None,
            mungers=None,
            formatter_lookup_fn=None,
            web3=None):
        self.json_rpc_method = json_rpc_method
        self.mungers = mungers or [default_munger]
        self.formatter_lookup_fn = formatter_lookup_fn or get_default_formatters

    def __get__(self, obj=None, obj_type=None):
        if obj is None:
            raise TypeError(
                "Direct calls to methods are not supported. "
                "Methods must be called from an module instance, "
                "usually attached to a web3 instance.")
        return obj.retrieve_caller_fn(self)

    @property
    def method_selector_fn(self):
        """Gets the method selector from the config.
        """
        if callable(self.json_rpc_method):
            return self.json_rpc_method
        elif isinstance(self.json_rpc_method, (str,)):
            return lambda *_: self.json_rpc_method
        raise ValueError("``json_rpc_method`` config invalid. May be a string or function")

    def get_formatters(self, method_string):
        """Lookup the request formatters for the rpc_method.

        The lookup_fn output is expected to be a 2 length tuple of lists of
        the request and output formatters, respectively.
        """
        formatters = self.formatter_lookup_fn(method_string)
        return formatters or get_default_formatters()

    def input_munger(self, val):
        """Apply the configured mungers to ``(module, args, kwargs)``.

        Raises:
            ValueError: when ``val`` is not a 3-tuple of
                ``(module, args, kwargs)``.
        """
        try:
            module, args, kwargs = val
        except (TypeError, ValueError) as err:
            # TypeError: ``val`` is not iterable.  ValueError: wrong arity --
            # the previous implementation only caught TypeError, letting the
            # raw unpacking ValueError escape for wrong-length inputs.
            raise ValueError("input_munger expects a 3-tuple") from err
        # TODO: Create friendly error output.
        mungers_iter = iter(self.mungers)
        root_munger = next(mungers_iter)
        munged_inputs = pipe(
            root_munger(module, *args, **kwargs),
            *map(lambda m: _munger_star_apply(functools.partial(m, module)), mungers_iter))
        return munged_inputs

    def process_params(self, module, *args, **kwargs):
        # Takes in the raw call inputs and performs steps 1-3 above.
        params, method, (req_formatters, ret_formatters) = _pipe_and_accumulate(
            (module, args, kwargs,),
            [self.input_munger, self.method_selector_fn, self.get_formatters])
        return (method, pipe(params, *req_formatters)), ret_formatters
@to_tuple
def _pipe_and_accumulate(val, fns):
    """pipes val through a list of fns while accumulating results from
    each function, returning a tuple.
    e.g.:
        >>> _pipe_and_accumulate(5, [lambda x: x**2, lambda x: x*10])
        (25, 250)
    """
    for fn in fns:
        val = fn(val)
        yield val
import datetime
import time
class BadFunctionCallOutput(Exception):
    """
    We failed to decode ABI output.
    Most likely the supplied ABI does not match the deployed contract.
    """
    pass
class BlockNumberOutofRange(Exception):
    """
    Raised when the block_identifier passed does not match any known block.
    """
    pass
class CannotHandleRequest(Exception):
    """
    Raised by a provider to signal that it cannot handle an RPC request and
    that the manager should proceed to the next provider.
    """
    pass
class InvalidAddress(ValueError):
    """
    The supplied address does not have a valid checksum, as defined in EIP-55.
    """
    pass
class NameNotFound(ValueError):
    """
    Raised when a caller provides an Ethereum Name Service name that
    does not resolve to an address.
    """
    pass
class StaleBlockchain(Exception):
    """
    Raised by the stalecheck_middleware when the latest block is too old.
    The exception args are ``(message, block, allowable_delay)``.
    """
    def __init__(self, block, allowable_delay):
        age = time.time() - block.timestamp
        mined_at = datetime.datetime.fromtimestamp(block.timestamp).strftime('%c')
        message = (
            "The latest block, #%d, is %d seconds old, but is only allowed to be %d s old. "
            "The date of the most recent block is %s. Continue syncing and try again..." %
            (block.number, age, allowable_delay, mined_at)
        )
        super().__init__(message, block, allowable_delay)

    def __str__(self):
        # Show only the human-readable message, not the full args tuple.
        return self.args[0]
class MismatchedABI(Exception):
    """
    Raised when an ABI does not match with supplied parameters, or when an
    attempt is made to access a function/event that does not exist in the ABI.
    """
    pass
class FallbackNotFound(Exception):
    """
    Raised when the fallback function doesn't exist in the contract ABI.
    """
    pass
class ValidationError(Exception):
    """
    Raised when a supplied value is invalid.
    """
    pass
class NoABIFunctionsFound(AttributeError):
    """
    Raised when an ABI is present, but doesn't contain any functions.
    """
    pass
class NoABIFound(AttributeError):
    """
    Raised when no ABI is present on the contract.
    """
    pass
class NoABIEventsFound(AttributeError):
    """
    Raised when an ABI doesn't contain any events.
    """
    pass
class InsufficientData(Exception):
    """
    Raised when there are insufficient data points to
    complete a calculation.
    """
    pass
class TimeExhausted(Exception):
    """
    Raised when a method has not retrieved the desired result within a specified timeout.
    """
    pass
class PMError(Exception):
    """
    Raised when an error occurs in the PM (package manager) module.
    """
    pass
class ManifestValidationError(PMError):
    """
    Raised when a provided manifest cannot be published, since it's invalid.
    """
    pass
import codecs
from distutils.version import (
LooseVersion,
)
import functools
import json
import eth_abi
from eth_utils import (
to_checksum_address,
)
from eth_utils.address import (
is_binary_address,
)
from hexbytes import (
HexBytes,
)
from web3._utils.abi import (
process_type,
)
from web3._utils.encoding import (
hexstr_if_str,
text_if_str,
to_bytes,
to_hex,
to_text,
)
from web3._utils.ens import (
StaticENS,
is_ens_name,
validate_name_has_address,
)
from web3._utils.toolz import (
curry,
)
from web3._utils.validation import (
validate_abi,
validate_address,
)
from web3.exceptions import (
InvalidAddress,
)
def implicitly_identity(to_wrap):
    """
    Decorator: when the wrapped normalizer returns None, fall back to the
    untouched ``(abi_type, data)`` pair instead.
    """
    @functools.wraps(to_wrap)
    def wrapper(abi_type, data):
        result = to_wrap(abi_type, data)
        return (abi_type, data) if result is None else result
    return wrapper
#
# Return Normalizers
#
@implicitly_identity
def addresses_checksummed(abi_type, data):
    # Return normalizer: convert address values to EIP-55 checksum form.
    if abi_type == 'address':
        return abi_type, to_checksum_address(data)
@implicitly_identity
def decode_abi_strings(abi_type, data):
    # Return normalizer: decode utf-8 bytes (as produced by eth-abi < 2) to
    # str, replacing undecodable bytes with backslash escapes.
    if abi_type == 'string':
        return abi_type, codecs.decode(data, 'utf8', 'backslashreplace')
#
# Argument Normalizers
#
@implicitly_identity
def abi_bytes_to_hex(abi_type, data):
    """Argument normalizer: convert bytes/bytesN values to 0x-prefixed hex."""
    base, sub, arrlist = process_type(abi_type)
    if base == 'bytes' and not arrlist:
        bytes_data = hexstr_if_str(to_bytes, data)
        if not sub:
            # Dynamic ``bytes``: no length constraint.
            return abi_type, to_hex(bytes_data)
        else:
            # Fixed ``bytesN``: right-pad with NULs up to N, reject oversize.
            num_bytes = int(sub)
            if len(bytes_data) <= num_bytes:
                padded = bytes_data.ljust(num_bytes, b'\0')
                return abi_type, to_hex(padded)
            else:
                raise ValueError(
                    "This value was expected to be at most %d bytes, but instead was %d: %r" % (
                        (num_bytes, len(bytes_data), data)
                    )
                )
@implicitly_identity
def abi_int_to_hex(abi_type, data):
    # NOTE: despite the name, only unsigned ('uint') scalar types are handled.
    base, _sub, arrlist = process_type(abi_type)
    if base == 'uint' and not arrlist:
        return abi_type, hexstr_if_str(to_hex, data)
@implicitly_identity
def abi_string_to_hex(abi_type, data):
    # Argument normalizer: encode text string values as hex.
    if abi_type == 'string':
        return abi_type, text_if_str(to_hex, data)
@implicitly_identity
def abi_string_to_text(abi_type, data):
    # Argument normalizer: coerce string values to text.
    if abi_type == 'string':
        return abi_type, text_if_str(to_text, data)
@implicitly_identity
def abi_bytes_to_bytes(abi_type, data):
    # Argument normalizer: coerce dynamic bytes values (possibly hex strings)
    # to raw bytes.
    base, sub, arrlist = process_type(abi_type)
    if base == 'bytes' and not arrlist:
        return abi_type, hexstr_if_str(to_bytes, data)
@implicitly_identity
def abi_address_to_hex(abi_type, data):
    # Argument normalizer: validate addresses and convert 20-byte binary
    # addresses to their checksummed hex form.
    if abi_type == 'address':
        validate_address(data)
        if is_binary_address(data):
            return abi_type, to_checksum_address(data)
@curry
def abi_ens_resolver(w3, abi_type, val):
    """Argument normalizer: resolve ENS names for 'address'-typed values.

    Raises:
        InvalidAddress: when a name is given but no web3 connection, no ENS
            instance, or a non-mainnet network makes resolution impossible.
    """
    if abi_type == 'address' and is_ens_name(val):
        if w3 is None:
            raise InvalidAddress(
                "Could not look up name %r because no web3"
                " connection available" % (val)
            )
        elif w3.ens is None:
            raise InvalidAddress(
                "Could not look up name %r because ENS is"
                " set to None" % (val)
            )
        # BUG FIX: use ``!=`` instead of ``is not`` -- identity comparison
        # with an int literal relies on CPython small-int caching and emits a
        # SyntaxWarning on Python 3.8+.
        elif int(w3.net.version) != 1 and not isinstance(w3.ens, StaticENS):
            raise InvalidAddress(
                "Could not look up name %r because web3 is"
                " not connected to mainnet" % (val)
            )
        else:
            return (abi_type, validate_name_has_address(w3.ens, val))
    else:
        return (abi_type, val)
# Normalizers applied to every ABI-decoded return value.
BASE_RETURN_NORMALIZERS = [
    addresses_checksummed,
]
# eth-abi < 2 returns utf-8 bytes for string values; decode them to str.
if LooseVersion(eth_abi.__version__) < LooseVersion("2"):
    BASE_RETURN_NORMALIZERS.append(decode_abi_strings)
#
# Property Normalizers
#
def normalize_abi(abi):
    # Accept an ABI either as a JSON string or as an already-parsed structure;
    # always validate before returning.
    if isinstance(abi, str):
        abi = json.loads(abi)
    validate_abi(abi)
    return abi
def normalize_address(ens, address):
    # Accept either an ENS name (checked for resolvability via ``ens``) or a
    # literal address (checksum-validated).  Falsy values pass through.
    if address:
        if is_ens_name(address):
            validate_name_has_address(ens, address)
        else:
            validate_address(address)
    return address
def normalize_bytecode(bytecode):
    # Coerce any truthy bytecode value to HexBytes; leave falsy values as-is.
    if bytecode:
        bytecode = HexBytes(bytecode)
    return bytecode
from eth_abi import (
decode_abi,
is_encodable,
)
from eth_abi.grammar import (
parse as parse_type_string,
)
from eth_utils import (
is_list_like,
is_string,
is_text,
)
from hexbytes import (
HexBytes,
)
from web3._utils.formatters import (
apply_formatter_if,
)
from web3._utils.threads import (
TimerClass,
)
from web3._utils.toolz import (
complement,
curry,
)
from web3._utils.validation import (
validate_address,
)
from .events import (
construct_event_data_set,
construct_event_topic_set,
)
def construct_event_filter_params(event_abi,
                                  contract_address=None,
                                  argument_filters=None,
                                  topics=None,
                                  fromBlock=None,
                                  toBlock=None,
                                  address=None):
    """Build ``eth_newFilter``-style parameters for an event.

    Returns a 2-tuple ``(data_filters_set, filter_params)``:
    ``data_filters_set`` is used client-side to match non-indexed arguments,
    ``filter_params`` is the dict sent to the node.
    """
    filter_params = {}
    topic_set = construct_event_topic_set(event_abi, argument_filters)
    if topics is not None:
        # An explicit ``topics`` argument may not be combined with topics
        # derived from ``argument_filters``.
        if len(topic_set) > 1:
            raise TypeError(
                "Merging the topics argument with topics generated "
                "from argument_filters is not supported.")
        topic_set = topics
    if len(topic_set) == 1 and is_list_like(topic_set[0]):
        # A single nested list is unwrapped before being sent to the node.
        filter_params['topics'] = topic_set[0]
    else:
        filter_params['topics'] = topic_set
    # Combine the explicit ``address`` argument with the contract address.
    if address and contract_address:
        if is_list_like(address):
            filter_params['address'] = address + [contract_address]
        elif is_string(address):
            filter_params['address'] = [address, contract_address]
        else:
            raise ValueError(
                "Unsupported type for `address` parameter: {0}".format(type(address))
            )
    elif address:
        filter_params['address'] = address
    elif contract_address:
        filter_params['address'] = contract_address
    # Validate every address that ended up in the filter.
    if 'address' not in filter_params:
        pass
    elif is_list_like(filter_params['address']):
        for addr in filter_params['address']:
            validate_address(addr)
    else:
        validate_address(filter_params['address'])
    if fromBlock is not None:
        filter_params['fromBlock'] = fromBlock
    if toBlock is not None:
        filter_params['toBlock'] = toBlock
    data_filters_set = construct_event_data_set(event_abi, argument_filters)
    return data_filters_set, filter_params
class Filter:
    """Base wrapper around a node-side filter id.

    Subclasses override :meth:`format_entry` and :meth:`is_valid_entry` to
    post-process and screen the entries returned by the node.
    """
    callbacks = None
    stopped = False
    poll_interval = None
    filter_id = None

    def __init__(self, web3, filter_id):
        self.web3 = web3
        self.filter_id = filter_id
        self.callbacks = []
        super().__init__()

    def __str__(self):
        return "Filter for {0}".format(self.filter_id)

    def format_entry(self, entry):
        """Hook: transform an entry before handing it to callers."""
        return entry

    def is_valid_entry(self, entry):
        """Hook: return False to drop an entry from the results."""
        return True

    def _filter_valid_entries(self, entries):
        return filter(self.is_valid_entry, entries)

    def get_new_entries(self):
        """Return formatted entries seen since the previous poll."""
        changes = self.web3.eth.getFilterChanges(self.filter_id)
        return self._format_log_entries(self._filter_valid_entries(changes))

    def get_all_entries(self):
        """Return every formatted entry matching this filter."""
        logs = self.web3.eth.getFilterLogs(self.filter_id)
        return self._format_log_entries(self._filter_valid_entries(logs))

    def _format_log_entries(self, log_entries=None):
        if log_entries is None:
            return []
        return [self.format_entry(entry) for entry in log_entries]
class BlockFilter(Filter):
    # Filter over new block hashes; inherits all behavior from Filter.
    pass
class TransactionFilter(Filter):
    # Filter over pending transaction hashes; inherits all behavior from Filter.
    pass
class LogFilter(Filter):
    """Filter over contract event logs, with optional client-side matching of
    non-indexed (data) arguments and per-entry formatting."""
    data_filter_set = None
    data_filter_set_regex = None
    log_entry_formatter = None
    def __init__(self, *args, **kwargs):
        self.log_entry_formatter = kwargs.pop(
            'log_entry_formatter',
            self.log_entry_formatter,
        )
        if 'data_filter_set' in kwargs:
            self.set_data_filters(kwargs.pop('data_filter_set'))
        super().__init__(*args, **kwargs)
    def format_entry(self, entry):
        # Apply the configured formatter, if any; otherwise pass through.
        if self.log_entry_formatter:
            return self.log_entry_formatter(entry)
        return entry
    def set_data_filters(self, data_filter_set):
        """Sets the data filters (non indexed argument filters)

        Expects a set of tuples with the type and value, e.g.:
        (('uint256', [12345, 54321]), ('string', ('a-single-string',)))
        """
        self.data_filter_set = data_filter_set
        # Only build the matcher when at least one filter value is given;
        # ``is_valid_entry`` consults it via ``data_filter_set_function``.
        if any(data_filter_set):
            self.data_filter_set_function = match_fn(data_filter_set)
    def is_valid_entry(self, entry):
        if not self.data_filter_set:
            return True
        return bool(self.data_filter_set_function(entry['data']))
def decode_utf8_bytes(value):
    # eth-abi v1 returns utf-8 bytes for string values; decode to str.
    return value.decode("utf-8")
# Predicate: True for anything that is not a str.
not_text = complement(is_text)
# Formatter: decode bytes to str, pass str values through untouched.
normalize_to_text = apply_formatter_if(not_text, decode_utf8_bytes)
def normalize_data_values(type_string, data_value):
    """Decodes utf-8 bytes to strings for abi string values.

    eth-abi v1 returns utf-8 bytes for string values.
    This can be removed once eth-abi v2 is required.
    """
    _type = parse_type_string(type_string)
    if _type.base == "string":
        if _type.arrlist is not None:
            # String array: decode each element.
            return tuple((normalize_to_text(value) for value in data_value))
        else:
            return normalize_to_text(data_value)
    return data_value
@curry
def match_fn(match_values_and_abi, data):
    """Match function used for filtering non-indexed event arguments.

    Values provided through the match_values_and_abi parameter are
    compared to the abi decoded log data.
    """
    abi_types, all_match_values = zip(*match_values_and_abi)
    decoded_values = decode_abi(abi_types, HexBytes(data))
    for data_value, match_values, abi_type in zip(decoded_values, all_match_values, abi_types):
        if match_values is None:
            # None means "no constraint" for this argument.
            continue
        normalized_data = normalize_data_values(abi_type, data_value)
        for value in match_values:
            if not is_encodable(abi_type, value):
                raise ValueError(
                    "Value {0} is of the wrong abi type. "
                    "Expected {1} typed value.".format(value, abi_type))
            if value == normalized_data:
                break
        else:
            # for-else: no candidate matched this argument -> reject entry.
            return False
    return True
class ShhFilter(Filter):
    """Filter over whisper messages, polled via ``shh_getFilterMessages``."""
    def __init__(self, *args, **kwargs):
        self.poll_interval = kwargs.pop(
            'poll_interval',
            self.poll_interval,
        )
        super().__init__(*args, **kwargs)
    def get_new_entries(self):
        log_entries = self._filter_valid_entries(self.web3.shh.getMessages(self.filter_id))
        return self._format_log_entries(log_entries)
    def get_all_entries(self):
        # Whisper nodes do not retain delivered messages, so there is no
        # "all entries" query.
        raise NotImplementedError()
    def watch(self, callback):
        """Poll for new entries on a background timer, invoking ``callback``
        with each non-empty batch.  Returns the started timer."""
        def callback_wrapper():
            entries = self.get_new_entries()
            if entries:
                callback(entries)
        timer = TimerClass(self.poll_interval, callback_wrapper)
        # Daemonize so a forgotten watcher cannot keep the process alive.
        timer.daemon = True
        timer.start()
        return timer
from eth_utils import (
to_dict,
)
from web3._utils.abi import (
map_abi_data,
)
from web3._utils.formatters import (
apply_formatter_at_index,
)
from web3._utils.toolz import (
curry,
)
# ABI types for the fields of a transaction dict; used to normalize
# user-supplied transaction parameters.
TRANSACTION_PARAMS_ABIS = {
    'data': 'bytes',
    'from': 'address',
    'gas': 'uint',
    'gasPrice': 'uint',
    'nonce': 'uint',
    'to': 'address',
    'value': 'uint',
}
# ABI types for the address fields of a log-filter params dict.
FILTER_PARAMS_ABIS = {
    'to': 'address',
    'address': 'address[]',
}
# ABI types for the address fields of a parity trace-call params dict.
TRACE_PARAMS_ABIS = {
    'to': 'address',
    'from': 'address',
}
# Per-RPC-method parameter ABIs.  A list describes positional parameters
# (None = leave untouched); a dict describes the keys of a single dict param.
RPC_ABIS = {
    # eth
    'eth_call': TRANSACTION_PARAMS_ABIS,
    'eth_estimateGas': TRANSACTION_PARAMS_ABIS,
    'eth_getBalance': ['address', None],
    'eth_getBlockByHash': ['bytes32', 'bool'],
    'eth_getBlockTransactionCountByHash': ['bytes32'],
    'eth_getCode': ['address', None],
    'eth_getLogs': FILTER_PARAMS_ABIS,
    'eth_getStorageAt': ['address', 'uint', None],
    'eth_getTransactionByBlockHashAndIndex': ['bytes32', 'uint'],
    'eth_getTransactionByHash': ['bytes32'],
    'eth_getTransactionCount': ['address', None],
    'eth_getTransactionReceipt': ['bytes32'],
    'eth_getUncleCountByBlockHash': ['bytes32'],
    'eth_newFilter': FILTER_PARAMS_ABIS,
    'eth_sendRawTransaction': ['bytes'],
    'eth_sendTransaction': TRANSACTION_PARAMS_ABIS,
    'eth_sign': ['address', 'bytes'],
    # personal
    'personal_sendTransaction': TRANSACTION_PARAMS_ABIS,
    'personal_lockAccount': ['address'],
    'personal_unlockAccount': ['address', None, None],
    'personal_sign': [None, 'address', None],
    'trace_call': TRACE_PARAMS_ABIS,
    # parity
    'parity_listStorageKeys': ['address', None, None, None],
}
@curry
def apply_abi_formatters_to_dict(normalizers, abi_dict, data):
    """Apply ``normalizers`` to the values of ``data`` whose keys appear in
    ``abi_dict`` (key -> abi type), returning a new dict."""
    # Only format keys present in both the ABI spec and the data.
    fields = list(set(abi_dict.keys()) & set(data.keys()))
    formatted_values = map_abi_data(
        normalizers,
        [abi_dict[field] for field in fields],
        [data[field] for field in fields],
    )
    formatted_dict = dict(zip(fields, formatted_values))
    # Merge formatted values over the original data without mutating it.
    return dict(data, **formatted_dict)
@to_dict
def abi_request_formatters(normalizers, abis):
    """Yield an ``(rpc_method, formatter)`` pair for every entry in ``abis``.

    A list ABI describes positional parameters; a dict ABI describes the keys
    of a single dict parameter at position 0 (e.g. a transaction dict)."""
    for method, abi_types in abis.items():
        if isinstance(abi_types, list):
            yield method, map_abi_data(normalizers, abi_types)
        elif isinstance(abi_types, dict):
            single_dict_formatter = apply_abi_formatters_to_dict(normalizers, abi_types)
            yield method, apply_formatter_at_index(single_dict_formatter, 0)
        else:
            raise TypeError("ABI definitions must be a list or dictionary, got %r" % abi_types)
import json
import re
from eth_abi.encoding import (
BaseArrayEncoder,
)
from eth_utils import (
add_0x_prefix,
big_endian_to_int,
decode_hex,
encode_hex,
int_to_big_endian,
is_boolean,
is_bytes,
is_hex,
is_integer,
is_list_like,
remove_0x_prefix,
to_hex,
)
from hexbytes import (
HexBytes,
)
from web3._utils.abi import (
is_address_type,
is_array_type,
is_bool_type,
is_bytes_type,
is_int_type,
is_string_type,
is_uint_type,
size_of_type,
sub_type_of_array_type,
)
from web3._utils.toolz import (
curry,
)
from web3._utils.validation import (
assert_one_val,
validate_abi_type,
validate_abi_value,
)
from web3.datastructures import (
AttributeDict,
)
def hex_encode_abi_type(abi_type, value, force_size=None):
    """
    Encodes value into a hex string in format of abi_type
    """
    validate_abi_type(abi_type)
    validate_abi_value(abi_type, value)
    data_size = force_size or size_of_type(abi_type)
    if is_array_type(abi_type):
        # Encode each element at 256 bits and concatenate under one 0x prefix.
        sub_type = sub_type_of_array_type(abi_type)
        return "".join([remove_0x_prefix(hex_encode_abi_type(sub_type, v, 256)) for v in value])
    elif is_bool_type(abi_type):
        return to_hex_with_size(value, data_size)
    elif is_uint_type(abi_type):
        return to_hex_with_size(value, data_size)
    elif is_int_type(abi_type):
        # Signed ints use two's-complement representation.
        return to_hex_twos_compliment(value, data_size)
    elif is_address_type(abi_type):
        return pad_hex(value, data_size)
    elif is_bytes_type(abi_type):
        if is_bytes(value):
            return encode_hex(value)
        else:
            # Already a hex string; pass through unchanged.
            return value
    elif is_string_type(abi_type):
        return to_hex(text=value)
    else:
        raise ValueError(
            "Unsupported ABI type: {0}".format(abi_type)
        )
def to_hex_twos_compliment(value, bit_size):
    """
    Converts integer value to twos compliment hex representation with given bit_size
    """
    if value >= 0:
        return to_hex_with_size(value, bit_size)
    # Map the negative value onto its unsigned two's-complement equivalent.
    value = (1 << bit_size) + value
    # NOTE: dropped the Python 2 remnant ``.rstrip("L")`` -- hex() never
    # produces a trailing "L" on Python 3.
    return hex(value)
def to_hex_with_size(value, bit_size):
    """
    Converts a value to hex with given bit_size:
    """
    # Zero-pad the hex representation out to bit_size bits.
    return pad_hex(to_hex(value), bit_size)
def pad_hex(value, bit_size):
    """
    Pads a hex string up to the given bit_size
    """
    value = remove_0x_prefix(value)
    # 4 bits per hex character.
    return add_0x_prefix(value.zfill(int(bit_size / 4)))
def trim_hex(hexstr):
    """Strip leading zero nibbles from a 0x-prefixed hex string, keeping at
    least one digit (so '0x0000' becomes '0x0')."""
    if hexstr.startswith('0x0'):
        stripped = hexstr[2:].lstrip('0')
        hexstr = '0x' + stripped if stripped else '0x0'
    return hexstr
def to_int(value=None, hexstr=None, text=None):
    """
    Converts value to it's integer representation.
    Values are converted this way:
    * value:
    * bytes: big-endian integer
    * bool: True => 1, False => 0
    * hexstr: interpret hex as integer
    * text: interpret as string of digits, like '12' => 12
    """
    # Exactly one of the three arguments may be supplied.
    assert_one_val(value, hexstr=hexstr, text=text)
    if hexstr is not None:
        return int(hexstr, 16)
    elif text is not None:
        return int(text)
    elif isinstance(value, bytes):
        return big_endian_to_int(value)
    elif isinstance(value, str):
        # Ambiguous: a str could be hex or digits -- force the caller to choose.
        raise TypeError("Pass in strings with keyword hexstr or text")
    else:
        return int(value)
@curry
def pad_bytes(fill_with, num_bytes, unpadded):
    # Left-pad ``unpadded`` with ``fill_with`` up to ``num_bytes`` total.
    return unpadded.rjust(num_bytes, fill_with)
# Convenience: left-pad with NUL bytes.
zpad_bytes = pad_bytes(b'\0')
def to_bytes(primitive=None, hexstr=None, text=None):
    """Convert exactly one of ``primitive``/``hexstr``/``text`` to bytes.

    bools map to single flag bytes, ints go through their hex form,
    odd-length hex strings gain a leading zero nibble, and text is
    utf-8 encoded.
    """
    assert_one_val(primitive, hexstr=hexstr, text=text)
    if is_boolean(primitive):
        return b'\x01' if primitive else b'\x00'
    elif isinstance(primitive, bytes):
        return primitive
    elif is_integer(primitive):
        return to_bytes(hexstr=to_hex(primitive))
    elif hexstr is not None:
        if len(hexstr) % 2:
            # Odd-length hex: pad with a zero nibble so decode_hex succeeds.
            hexstr = '0x0' + remove_0x_prefix(hexstr)
        return decode_hex(hexstr)
    elif text is not None:
        return text.encode('utf-8')
    raise TypeError("expected an int in first arg, or keyword of hexstr or text")
def to_text(primitive=None, hexstr=None, text=None):
    """Convert the single supplied value (str/bytes/int, hexstr, or text) to unicode text."""
    assert_one_val(primitive, hexstr=hexstr, text=text)
    if hexstr is not None:
        return to_bytes(hexstr=hexstr).decode('utf-8')
    if text is not None:
        return text
    if isinstance(primitive, str):
        # Bare strings are treated as hex.
        return to_text(hexstr=primitive)
    if isinstance(primitive, bytes):
        return primitive.decode('utf-8')
    if is_integer(primitive):
        byte_encoding = int_to_big_endian(primitive)
        return to_text(byte_encoding)
    raise TypeError("Expected an int, bytes or hexstr.")
@curry
def text_if_str(to_type, text_or_primitive):
    """
    Convert to a type, assuming that strings can be only unicode text (not a hexstr)

    @param to_type is a function that takes the arguments (primitive, hexstr=hexstr, text=text),
    eg~ to_bytes, to_text, to_hex, to_int, etc
    @param text_or_primitive in bytes, str, or int.
    """
    if isinstance(text_or_primitive, str):
        (primitive, text) = (None, text_or_primitive)
    else:
        (primitive, text) = (text_or_primitive, None)
    return to_type(primitive, text=text)
@curry
def hexstr_if_str(to_type, hexstr_or_primitive):
    """
    Convert to a type, assuming that strings can be only hexstr (not unicode text)

    @param to_type is a function that takes the arguments (primitive, hexstr=hexstr, text=text),
    eg~ to_bytes, to_text, to_hex, to_int, etc
    @param hexstr_or_primitive in bytes, str, or int.
    """
    if isinstance(hexstr_or_primitive, str):
        (primitive, hexstr) = (None, hexstr_or_primitive)
        # Reject non-hex strings early (a bare '0x' is allowed through).
        if remove_0x_prefix(hexstr) and not is_hex(hexstr):
            raise ValueError(
                "when sending a str, it must be a hex string. Got: {0!r}".format(
                    hexstr_or_primitive,
                )
            )
    else:
        (primitive, hexstr) = (hexstr_or_primitive, None)
    return to_type(primitive, hexstr=hexstr)
class FriendlyJsonSerde:
    """
    Friendly JSON serializer & deserializer

    When encoding or decoding fails, this class collects
    information on which fields failed, to show more
    helpful information in the raised error messages.
    """

    def _json_mapping_errors(self, mapping):
        # Probe each value individually to pinpoint the unencodable keys.
        for key, val in mapping.items():
            try:
                self._friendly_json_encode(val)
            except TypeError as exc:
                yield f"{key!r}: because ({exc})"

    def _json_list_errors(self, iterable):
        # Probe each element individually to pinpoint the unencodable indices.
        for index, element in enumerate(iterable):
            try:
                self._friendly_json_encode(element)
            except TypeError as exc:
                yield f"{index}: because ({exc})"

    def _friendly_json_encode(self, obj, cls=None):
        try:
            return json.dumps(obj, cls=cls)
        except TypeError as full_exception:
            if hasattr(obj, 'items'):
                item_errors = '; '.join(self._json_mapping_errors(obj))
                raise TypeError(f"dict had unencodable value at keys: {{{item_errors}}}")
            if is_list_like(obj):
                element_errors = '; '.join(self._json_list_errors(obj))
                raise TypeError(f"list had unencodable value at index: [{element_errors}]")
            raise full_exception

    def json_decode(self, json_str):
        try:
            return json.loads(json_str)
        except json.decoder.JSONDecodeError as exc:
            err_msg = f'Could not decode {json_str!r} because of {exc}.'
            # Calling code may rely on catching JSONDecodeError to recognize bad json
            # so we have to re-raise the same type.
            raise json.decoder.JSONDecodeError(err_msg, exc.doc, exc.pos)

    def json_encode(self, obj, cls=None):
        try:
            return self._friendly_json_encode(obj, cls=cls)
        except TypeError as exc:
            raise TypeError(f"Could not encode to JSON: {exc}")
def to_4byte_hex(hex_or_str_or_bytes):
    """Coerce the input to a 0x-prefixed hex string padded to 4 bytes (8 hex digits)."""
    size_of_4bytes = 4 * 8
    byte_str = hexstr_if_str(to_bytes, hex_or_str_or_bytes)
    if len(byte_str) > 4:
        raise ValueError(
            'expected value of size 4 bytes. Got: %d bytes' % len(byte_str)
        )
    return pad_hex(encode_hex(byte_str), size_of_4bytes)
class DynamicArrayPackedEncoder(BaseArrayEncoder):
    """Array encoder emitting the packed form: concatenated elements, no length head."""
    is_dynamic = True

    def encode(self, value):
        return self.encode_elements(value)
# TODO: Replace with eth-abi packed encoder once web3 requires eth-abi>=2
def encode_single_packed(_type, value):
    """Encode *value* using Solidity's packed encoding for *_type*.

    Supports dynamic arrays, ``string`` and ``bytes``; fixed-size arrays
    raise NotImplementedError, and any other type falls through and
    implicitly returns None.
    """
    import codecs
    from eth_abi import (
        grammar as abi_type_parser,
    )
    from eth_abi.registry import has_arrlist, registry
    abi_type = abi_type_parser.parse(_type)
    if has_arrlist(_type):
        # Array types: pack each element with the item-type encoder.
        item_encoder = registry.get_encoder(str(abi_type.item_type))
        if abi_type.arrlist[-1] != 1:
            return DynamicArrayPackedEncoder(item_encoder=item_encoder).encode(value)
        else:
            raise NotImplementedError(
                "Fixed arrays are not implemented in this packed encoder prototype")
    elif abi_type.base == "string":
        # Packed strings are just their UTF-8 bytes.
        return codecs.encode(value, 'utf8')
    elif abi_type.base == "bytes":
        return value
class Web3JsonEncoder(json.JSONEncoder):
    """JSON encoder that knows how to serialize web3 value types."""

    def default(self, obj):
        if isinstance(obj, AttributeDict):
            # Serialize as a plain dict.
            return dict(obj.items())
        if isinstance(obj, HexBytes):
            return obj.hex()
        return json.JSONEncoder.default(self, obj)
def to_json(obj):
    '''
    Convert a complex object (like a transaction object) to a JSON string
    '''
    serializer = FriendlyJsonSerde()
    return serializer.json_encode(obj, cls=Web3JsonEncoder)
import threading
import time
class Timeout(Exception):
    """
    A limited subset of the `gevent.Timeout` context manager.
    """
    # Class-level defaults; instances overwrite these in __init__/start().
    seconds = None
    exception = None
    begun_at = None
    is_running = None

    def __init__(self, seconds=None, exception=None, *args, **kwargs):
        self.seconds = seconds
        self.exception = exception

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppress exceptions raised inside the block.
        return False

    def __str__(self):
        return '' if self.seconds is None else f"{self.seconds} seconds"

    @property
    def expire_at(self):
        # Only meaningful for a started timeout with a finite duration.
        if self.seconds is None:
            raise ValueError("Timeouts with `seconds == None` do not have an expiration time")
        if self.begun_at is None:
            raise ValueError("Timeout has not been started")
        return self.begun_at + self.seconds

    def start(self):
        if self.is_running is not None:
            raise ValueError("Timeout has already been started")
        self.begun_at = time.time()
        self.is_running = True

    def check(self):
        # Validate the lifecycle state first, then test for expiry.
        if self.is_running is None:
            raise ValueError("Timeout has not been started")
        if self.is_running is False:
            raise ValueError("Timeout has already been cancelled")
        if self.seconds is None or time.time() <= self.expire_at:
            return
        self.is_running = False
        if isinstance(self.exception, type):
            raise self.exception(str(self))
        if isinstance(self.exception, Exception):
            raise self.exception
        raise self

    def cancel(self):
        self.is_running = False

    def sleep(self, seconds):
        time.sleep(seconds)
        self.check()
class ThreadWithReturn(threading.Thread):
    """Thread whose target's return value can be retrieved with ``get()``."""

    def __init__(self, target=None, args=None, kwargs=None):
        super().__init__(
            target=target,
            args=args or tuple(),
            kwargs=kwargs or {},
        )
        self.target = target
        # Normalize to concrete containers: previously ``None`` was stored
        # directly, so ``run`` crashed unpacking ``*self.args`` whenever the
        # thread was constructed without explicit args/kwargs.
        self.args = args or tuple()
        self.kwargs = kwargs or {}

    def run(self):
        # Stash the target's result so ``get`` can return it after join().
        self._return = self.target(*self.args, **self.kwargs)

    def get(self, timeout=None):
        """Join the thread (optionally bounded by *timeout*) and return the target's result."""
        self.join(timeout)
        try:
            return self._return
        except AttributeError:
            # run() never completed (or raised before assigning _return).
            raise RuntimeError("Something went wrong. No `_return` property was set")
class TimerClass(threading.Thread):
    """Background thread that invokes ``callback(*args)`` every ``interval`` seconds."""

    def __init__(self, interval, callback, *args):
        super().__init__()
        self.callback = callback
        self.terminate_event = threading.Event()
        self.interval = interval
        self.args = args

    def run(self):
        # Fire immediately, then wait ``interval`` between calls until stopped.
        while not self.terminate_event.is_set():
            self.callback(*self.args)
            self.terminate_event.wait(self.interval)

    def stop(self):
        self.terminate_event.set()
def spawn(target, *args, thread_class=ThreadWithReturn, **kwargs):
    """Start *target* on a daemonized *thread_class* thread and return the thread."""
    worker = thread_class(target=target, args=args, kwargs=kwargs)
    worker.daemon = True
    worker.start()
    return worker
import itertools
from eth_utils import (
function_abi_to_4byte_selector,
is_0x_prefixed,
is_binary_address,
is_boolean,
is_bytes,
is_checksum_address,
is_dict,
is_hex_address,
is_integer,
is_list_like,
is_string,
)
from eth_utils.hexadecimal import (
encode_hex,
)
from web3._utils.abi import (
abi_to_signature,
filter_by_type,
is_address_type,
is_array_type,
is_bool_type,
is_bytes_type,
is_int_type,
is_recognized_type,
is_string_type,
is_uint_type,
length_of_array_type,
sub_type_of_array_type,
)
from web3._utils.formatters import (
apply_formatter_to_array,
)
from web3._utils.toolz import (
compose,
groupby,
valfilter,
valmap,
)
from web3.exceptions import (
InvalidAddress,
)
def _prepare_selector_collision_msg(duplicates):
    """Build a human-readable description of functions whose 4-byte selectors collide."""
    signatures_by_selector = valmap(apply_formatter_to_array(abi_to_signature), duplicates)
    joined_by_selector = valmap(', '.join, signatures_by_selector)
    return ' and\n'.join(
        funcs + ' have selector ' + sel
        for sel, funcs in joined_by_selector.items()
    )
def validate_abi(abi):
    """
    Helper function for validating an ABI
    """
    if not is_list_like(abi):
        raise ValueError("'abi' is not a list")
    if any(not is_dict(entry) for entry in abi):
        raise ValueError("'abi' is not a list of dictionaries")
    # Group the function entries by their hex-encoded 4-byte selector and
    # reject any selector shared by more than one function.
    selector_of = compose(encode_hex, function_abi_to_4byte_selector)
    grouped = groupby(selector_of, filter_by_type('function', abi))
    colliding = valfilter(lambda fns: len(fns) > 1, grouped)
    if colliding:
        raise ValueError(
            'Abi contains functions with colliding selectors. '
            'Functions {0}'.format(_prepare_selector_collision_msg(colliding))
        )
def validate_abi_type(abi_type):
    """
    Helper function for validating an abi_type
    """
    if not is_recognized_type(abi_type):
        raise ValueError(f"Unrecognized abi_type: {abi_type}")
def validate_abi_value(abi_type, value):
    """
    Helper function for validating a value against the expected abi_type
    Note: abi_type 'bytes' must either be python3 'bytes' object or ''
    Raises TypeError when the value does not match the type; returns None on success.
    """
    if is_array_type(abi_type) and is_list_like(value):
        # validate length
        specified_length = length_of_array_type(abi_type)
        if specified_length is not None:
            if specified_length < 1:
                raise TypeError(
                    "Invalid abi-type: {abi_type}. Length of fixed sized arrays"
                    "must be greater than 0."
                    .format(abi_type=abi_type)
                )
            if specified_length != len(value):
                raise TypeError(
                    "The following array length does not the length specified"
                    "by the abi-type, {abi_type}: {value}"
                    .format(abi_type=abi_type, value=value)
                )
        # validate sub_types
        sub_type = sub_type_of_array_type(abi_type)
        for v in value:
            # Recurse into each element with the element type.
            validate_abi_value(sub_type, v)
        return
    elif is_bool_type(abi_type) and is_boolean(value):
        return
    elif is_uint_type(abi_type) and is_integer(value) and value >= 0:
        return
    elif is_int_type(abi_type) and is_integer(value):
        return
    elif is_address_type(abi_type):
        # Delegates to validate_address, which raises on failure.
        validate_address(value)
        return
    elif is_bytes_type(abi_type):
        if is_bytes(value):
            return
        elif is_string(value):
            if is_0x_prefixed(value):
                return
            else:
                raise TypeError(
                    "ABI values of abi-type 'bytes' must be either"
                    "a python3 'bytes' object or an '0x' prefixed string."
                )
    elif is_string_type(abi_type) and is_string(value):
        return
    # No branch accepted the value.
    raise TypeError(
        "The following abi value is not a '{abi_type}': {value}"
        .format(abi_type=abi_type, value=value)
    )
def validate_address(value):
    """
    Helper function for validating an address
    """
    if is_bytes(value):
        if not is_binary_address(value):
            raise InvalidAddress("Address must be 20 bytes when input type is bytes", value)
        return
    if not isinstance(value, str):
        raise TypeError(f'Address {value} must be provided as a string')
    if not is_hex_address(value):
        raise InvalidAddress("Address must be 20 bytes, as a hex string with a 0x prefix", value)
    if is_checksum_address(value):
        return
    # Not checksummed: distinguish an all-lowercase address (no checksum at
    # all) from one with an incorrect EIP-55 checksum.
    if value == value.lower():
        raise InvalidAddress(
            "Web3.py only accepts checksum addresses. "
            "The software that gave you this non-checksum address should be considered unsafe, "
            "please file it as a bug on their platform. "
            "Try using an ENS name instead. Or, if you must accept lower safety, "
            "use Web3.toChecksumAddress(lower_case_address).",
            value,
        )
    raise InvalidAddress(
        "Address has an invalid EIP-55 checksum. "
        "After looking up the address from the original source, try again.",
        value,
    )
def has_one_val(*args, **kwargs):
    """Return True when exactly one of the supplied values is not None."""
    supplied = itertools.chain(args, kwargs.values())
    return sum(1 for val in supplied if val is not None) == 1
def assert_one_val(*args, **kwargs):
    """Raise TypeError unless exactly one of the supplied values is not None."""
    if has_one_val(*args, **kwargs):
        return
    raise TypeError(
        "Exactly one of the passed values can be specified. "
        "Instead, values were: %r, %r" % (args, kwargs)
    )
import functools
from eth_abi import (
encode_abi as eth_abi_encode_abi,
)
from eth_utils import (
add_0x_prefix,
encode_hex,
function_abi_to_4byte_selector,
is_text,
)
from hexbytes import (
HexBytes,
)
from web3._utils.abi import (
abi_to_signature,
check_if_arguments_can_be_encoded,
filter_by_argument_count,
filter_by_argument_name,
filter_by_encodability,
filter_by_name,
filter_by_type,
get_abi_input_types,
get_abi_inputs,
get_fallback_func_abi,
map_abi_data,
merge_args_and_kwargs,
)
from web3._utils.encoding import (
to_hex,
)
from web3._utils.function_identifiers import (
FallbackFn,
)
from web3._utils.normalizers import (
abi_address_to_hex,
abi_bytes_to_bytes,
abi_ens_resolver,
abi_string_to_text,
)
from web3._utils.toolz import (
pipe,
valmap,
)
from web3.exceptions import (
ValidationError,
)
def find_matching_event_abi(abi, event_name=None, argument_names=None):
    """Return the single event ABI matching the optional name/argument filters."""
    filters = [functools.partial(filter_by_type, 'event')]
    if event_name is not None:
        filters.append(functools.partial(filter_by_name, event_name))
    if argument_names is not None:
        filters.append(functools.partial(filter_by_argument_name, argument_names))

    candidates = pipe(abi, *filters)

    if not candidates:
        raise ValueError("No matching events found")
    if len(candidates) > 1:
        raise ValueError("Multiple events found")
    return candidates[0]
def find_matching_fn_abi(abi, fn_identifier=None, args=None, kwargs=None):
    """Locate the single function ABI entry matching *fn_identifier* and the call arguments.

    Raises ValidationError with a diagnostic message when zero or multiple
    candidates match, and TypeError for unsupported identifier types.
    """
    args = args or tuple()
    kwargs = kwargs or dict()
    num_arguments = len(args) + len(kwargs)

    if fn_identifier is FallbackFn:
        return get_fallback_func_abi(abi)

    if not is_text(fn_identifier):
        raise TypeError("Unsupported function identifier")

    name_filter = functools.partial(filter_by_name, fn_identifier)
    arg_count_filter = functools.partial(filter_by_argument_count, num_arguments)
    encoding_filter = functools.partial(filter_by_encodability, args, kwargs)

    function_candidates = pipe(abi, name_filter, arg_count_filter, encoding_filter)

    if len(function_candidates) == 1:
        return function_candidates[0]

    # Build a diagnostic for the failure case.
    matching_identifiers = name_filter(abi)
    matching_function_signatures = [abi_to_signature(func) for func in matching_identifiers]

    arg_count_matches = len(arg_count_filter(matching_identifiers))
    encoding_matches = len(encoding_filter(matching_identifiers))

    if arg_count_matches == 0:
        diagnosis = "\nFunction invocation failed due to improper number of arguments."
    elif encoding_matches == 0:
        diagnosis = "\nFunction invocation failed due to no matching argument types."
    elif encoding_matches > 1:
        diagnosis = (
            "\nAmbiguous argument encoding. "
            "Provided arguments can be encoded to multiple functions matching this call."
        )
    else:
        # BUGFIX: this branch (exactly one encodable name-match, but not a
        # unique candidate after arg-count filtering) previously left
        # ``diagnosis`` unbound, raising UnboundLocalError instead of the
        # intended ValidationError.
        diagnosis = ""

    message = (
        "\nCould not identify the intended function with name `{name}`, "
        "positional argument(s) of type `{arg_types}` and "
        "keyword argument(s) of type `{kwarg_types}`."
        "\nFound {num_candidates} function(s) with the name `{name}`: {candidates}"
        "{diagnosis}"
    ).format(
        name=fn_identifier,
        arg_types=tuple(map(type, args)),
        kwarg_types=valmap(type, kwargs),
        num_candidates=len(matching_identifiers),
        candidates=matching_function_signatures,
        diagnosis=diagnosis,
    )

    raise ValidationError(message)
def encode_abi(web3, abi, arguments, data=None):
    """ABI-encode *arguments* for *abi*, optionally prefixed with *data* (e.g. a selector)."""
    argument_types = get_abi_input_types(abi)

    if not check_if_arguments_can_be_encoded(abi, arguments, {}):
        raise TypeError(
            "One or more arguments could not be encoded to the necessary "
            "ABI type. Expected types are: {0}".format(
                ', '.join(argument_types),
            )
        )

    # Normalize (resolve ENS names, coerce address/bytes/string forms)
    # before handing off to eth-abi.
    normalizers = [
        abi_ens_resolver(web3),
        abi_address_to_hex,
        abi_bytes_to_bytes,
        abi_string_to_text,
    ]
    normalized_arguments = map_abi_data(normalizers, argument_types, arguments)
    encoded_arguments = eth_abi_encode_abi(argument_types, normalized_arguments)

    if data:
        return to_hex(HexBytes(data) + encoded_arguments)
    return encode_hex(encoded_arguments)
def prepare_transaction(
        address,
        web3,
        fn_identifier,
        contract_abi=None,
        fn_abi=None,
        transaction=None,
        fn_args=None,
        fn_kwargs=None):
    """
    :parameter `is_function_abi` is used to distinguish function abi from contract abi
    Returns a dictionary of the transaction that could be used to call this
    TODO: make this a public API
    TODO: add new prepare_deploy_transaction API
    """
    if fn_abi is None:
        fn_abi = find_matching_fn_abi(contract_abi, fn_identifier, fn_args, fn_kwargs)

    # Copy the caller's dict so we never mutate their object; an omitted
    # transaction starts empty.
    if transaction is None:
        prepared_transaction = {}
    else:
        prepared_transaction = dict(**transaction)

    # BUGFIX: validate against the normalized dict — previously a ``None``
    # transaction (the documented default) was passed straight into
    # validate_payable, which crashed on the ``'value' in transaction`` test.
    validate_payable(prepared_transaction, fn_abi)

    if 'data' in prepared_transaction:
        raise ValueError("Transaction parameter may not contain a 'data' key")

    if address:
        prepared_transaction.setdefault('to', address)

    prepared_transaction['data'] = encode_transaction_data(
        web3,
        fn_identifier,
        contract_abi,
        fn_abi,
        fn_args,
        fn_kwargs,
    )
    return prepared_transaction
def encode_transaction_data(
        web3,
        fn_identifier,
        contract_abi=None,
        fn_abi=None,
        args=None,
        kwargs=None):
    """Return the 0x-prefixed call data (selector + encoded arguments) for a function call.

    ``fn_identifier`` is either a function name string or the ``FallbackFn``
    sentinel; any other type raises TypeError.
    """
    if fn_identifier is FallbackFn:
        fn_abi, fn_selector, fn_arguments = get_fallback_function_info(contract_abi, fn_abi)
    elif is_text(fn_identifier):
        fn_abi, fn_selector, fn_arguments = get_function_info(
            fn_identifier, contract_abi, fn_abi, args, kwargs,
        )
    else:
        raise TypeError("Unsupported function identifier")
    return add_0x_prefix(encode_abi(web3, fn_abi, fn_arguments, fn_selector))
def get_fallback_function_info(contract_abi=None, fn_abi=None):
    """Resolve the (abi, selector, arguments) triple for the contract's fallback function."""
    if fn_abi is None:
        fn_abi = get_fallback_func_abi(contract_abi)
    # The fallback function has no selector and takes no arguments.
    return fn_abi, encode_hex(b''), tuple()
def get_function_info(fn_name, contract_abi=None, fn_abi=None, args=None, kwargs=None):
    """Resolve the (abi, selector, normalized arguments) triple for *fn_name*."""
    args = tuple() if args is None else args
    kwargs = {} if kwargs is None else kwargs
    if fn_abi is None:
        fn_abi = find_matching_fn_abi(contract_abi, fn_name, args, kwargs)

    fn_selector = encode_hex(function_abi_to_4byte_selector(fn_abi))
    # Merge positional and keyword args into ABI declaration order.
    fn_arguments = merge_args_and_kwargs(fn_abi, args, kwargs)
    _, fn_arguments = get_abi_inputs(fn_abi, fn_arguments)
    return fn_abi, fn_selector, fn_arguments
def validate_payable(transaction, abi):
    """Raise ValidationError if non-zero ether
    is sent to a non payable function.
    """
    sends_value = 'value' in transaction and transaction['value'] != 0
    if sends_value and "payable" in abi and not abi["payable"]:
        raise ValidationError(
            "Sending non-zero ether to a contract function "
            "with payable=False. Please ensure that "
            "transaction's value is 0."
        )
import math
from web3._utils.threads import (
Timeout,
)
from web3._utils.toolz import (
assoc,
curry,
merge,
)
# Transaction dict keys recognized by this module; anything else is rejected
# by ``assert_valid_transaction_params`` and dropped by
# ``extract_valid_transaction_params``.
VALID_TRANSACTION_PARAMS = [
    'from',
    'to',
    'gas',
    'gasPrice',
    'value',
    'data',
    'nonce',
    'chainId',
]
# Defaults applied by ``fill_transaction_defaults``.  Callable values are
# invoked lazily with (web3, tx) and therefore require a connected ``web3``.
TRANSACTION_DEFAULTS = {
    'value': 0,
    'data': b'',
    'gas': lambda web3, tx: web3.eth.estimateGas(tx),
    'gasPrice': lambda web3, tx: web3.eth.generateGasPrice(tx) or web3.eth.gasPrice,
    'chainId': lambda web3, tx: web3.net.chainId,
}
@curry
def fill_nonce(web3, transaction):
    """Populate 'nonce' from the sender's pending transaction count when 'from' is set."""
    if 'from' not in transaction or 'nonce' in transaction:
        return transaction
    pending_count = web3.eth.getTransactionCount(
        transaction['from'],
        block_identifier='pending',
    )
    return assoc(transaction, 'nonce', pending_count)
@curry
def fill_transaction_defaults(web3, transaction):
    """
    if web3 is None, fill as much as possible while offline
    """
    defaults = {}
    for key, default_getter in TRANSACTION_DEFAULTS.items():
        if key in transaction:
            continue
        if not callable(default_getter):
            # Static default (e.g. value=0, data=b'').
            defaults[key] = default_getter
        elif web3 is None:
            # Chain-dependent default cannot be computed offline.
            raise ValueError("You must specify %s in the transaction" % key)
        else:
            defaults[key] = default_getter(web3, transaction)
    return merge(defaults, transaction)
def wait_for_transaction_receipt(web3, txn_hash, timeout=120, poll_latency=0.1):
    """Poll until the receipt for *txn_hash* exists and is mined, or the Timeout fires."""
    with Timeout(timeout) as _timeout:
        while True:
            txn_receipt = web3.eth.getTransactionReceipt(txn_hash)
            # FIXME: The check for a null `blockHash` is due to parity's
            # non-standard implementation of the JSON-RPC API and should
            # be removed once the formal spec for the JSON-RPC endpoints
            # has been finalized.
            if txn_receipt is not None and txn_receipt['blockHash'] is not None:
                return txn_receipt
            _timeout.sleep(poll_latency)
def get_block_gas_limit(web3, block_identifier=None):
    """Return the gas limit of the given block (defaults to the latest block number)."""
    if block_identifier is None:
        block_identifier = web3.eth.blockNumber
    return web3.eth.getBlock(block_identifier)['gasLimit']
def get_buffered_gas_estimate(web3, transaction, gas_buffer=100000):
    """Estimate gas for *transaction* plus a safety buffer, capped at the block gas limit."""
    gas_estimate = web3.eth.estimateGas(dict(**transaction))
    gas_limit = get_block_gas_limit(web3)

    if gas_estimate > gas_limit:
        raise ValueError(
            "Contract does not appear to be deployable within the "
            "current network gas limits. Estimated: {0}. Current gas "
            "limit: {1}".format(gas_estimate, gas_limit)
        )

    return min(gas_limit, gas_estimate + gas_buffer)
def get_required_transaction(web3, transaction_hash):
    """Fetch a transaction by hash, raising ValueError when it does not exist."""
    current_transaction = web3.eth.getTransaction(transaction_hash)
    if current_transaction:
        return current_transaction
    raise ValueError('Supplied transaction with hash {} does not exist'
                     .format(transaction_hash))
def extract_valid_transaction_params(transaction_params):
    """Project *transaction_params* onto the recognized keys, folding 'input' into 'data'.

    Raises AttributeError when both 'input' and 'data' are present with
    conflicting values.
    """
    extracted_params = {
        key: transaction_params[key]
        for key in VALID_TRANSACTION_PARAMS
        if key in transaction_params
    }

    data = extracted_params.get('data')
    input_field = transaction_params.get('input')

    if data is not None and input_field is not None and data != input_field:
        msg = 'failure to handle this transaction due to both "input: {}" and'
        msg += ' "data: {}" are populated. You need to resolve this conflict.'
        raise AttributeError(msg.format(input_field, data))
    if data is None and input_field is not None:
        # Geth populates 'input'; normalize it onto 'data'.
        return assoc(extracted_params, 'data', input_field)
    return extracted_params
def assert_valid_transaction_params(transaction_params):
    """Raise ValueError for any key that is not a recognized transaction parameter."""
    for param in transaction_params:
        if param in VALID_TRANSACTION_PARAMS:
            continue
        raise ValueError(f'{param} is not a valid transaction parameter')
def prepare_replacement_transaction(web3, current_transaction, new_transaction):
    """Build a replacement for a pending transaction: same nonce, strictly higher gas price."""
    # Replacement only makes sense while the original is still pending.
    if current_transaction['blockHash'] is not None:
        raise ValueError('Supplied transaction with hash {} has already been mined'
                         .format(current_transaction['hash']))
    # The replacement must reuse the pending transaction's nonce.
    if 'nonce' in new_transaction and new_transaction['nonce'] != current_transaction['nonce']:
        raise ValueError('Supplied nonce in new_transaction must match the pending transaction')
    if 'nonce' not in new_transaction:
        new_transaction = assoc(new_transaction, 'nonce', current_transaction['nonce'])
    if 'gasPrice' in new_transaction:
        # An explicitly supplied gas price must outbid the existing one.
        if new_transaction['gasPrice'] <= current_transaction['gasPrice']:
            raise ValueError('Supplied gas price must exceed existing transaction gas price')
    else:
        generated_gas_price = web3.eth.generateGasPrice(new_transaction)
        # Floor at a 10% bump over the current transaction's gas price.
        minimum_gas_price = int(math.ceil(current_transaction['gasPrice'] * 1.1))
        if generated_gas_price and generated_gas_price > minimum_gas_price:
            new_transaction = assoc(new_transaction, 'gasPrice', generated_gas_price)
        else:
            new_transaction = assoc(new_transaction, 'gasPrice', minimum_gas_price)
    return new_transaction
def replace_transaction(web3, current_transaction, new_transaction):
    """Send a replacement for a pending transaction, reusing its nonce."""
    prepared = prepare_replacement_transaction(web3, current_transaction, new_transaction)
    return web3.eth.sendTransaction(prepared)
from collections import (
Iterable,
Mapping,
)
from eth_utils import (
is_dict,
is_list_like,
is_string,
to_dict,
to_list,
)
from web3._utils.decorators import (
reject_recursive_repeats,
)
from web3._utils.toolz import (
compose,
curry,
dissoc,
)
def hex_to_integer(value):
    """Parse a base-16 string (with or without a '0x' prefix) into an int."""
    return int(value, base=16)


# Inverse operation: int -> '0x...' string.
integer_to_hex = hex
@curry
@to_list
def apply_formatter_at_index(formatter, at_index, value):
    """Yield *value* unchanged except the element at *at_index*, which is formatted."""
    if at_index + 1 > len(value):
        raise IndexError(
            "Not enough values in iterable to apply formatter. Got: {0}. "
            "Need: {1}".format(len(value), at_index + 1)
        )
    for index, item in enumerate(value):
        yield formatter(item) if index == at_index else item
def apply_formatters_to_args(*formatters):
    """Compose per-position formatters: formatter i is applied to positional argument i."""
    indexed_formatters = [
        apply_formatter_at_index(formatter, index)
        for index, formatter in enumerate(formatters)
    ]
    return compose(*indexed_formatters)
@curry
def apply_formatter_if(condition, formatter, value):
    """Apply *formatter* to *value* only when *condition(value)* holds."""
    return formatter(value) if condition(value) else value
@curry
@to_dict
def apply_formatters_to_dict(formatters, value):
    """Format each dict value whose key appears in *formatters*; pass others through."""
    for key, item in value.items():
        if key not in formatters:
            yield key, item
            continue
        try:
            yield key, formatters[key](item)
        except (TypeError, ValueError) as exc:
            # Re-raise the same exception type, naming the offending field.
            raise type(exc)("Could not format value %r as field %r" % (item, key)) from exc
@curry
@to_list
def apply_formatter_to_array(formatter, value):
    """Apply *formatter* to every element of *value*."""
    yield from map(formatter, value)
@curry
def apply_one_of_formatters(formatter_condition_pairs, value):
    """Apply the first formatter whose paired condition accepts *value*."""
    for formatter, condition in formatter_condition_pairs:
        if condition(value):
            return formatter(value)
    raise ValueError("The provided value did not satisfy any of the formatter conditions")
def map_collection(func, collection):
    """
    Apply func to each element of a collection, or value of a dictionary.
    If the value is not a collection, return it unmodified
    """
    # Import the ABCs from collections.abc: the aliases re-exported from
    # ``collections`` were removed in Python 3.10.
    from collections.abc import Iterable, Mapping

    datatype = type(collection)
    if isinstance(collection, Mapping):
        return datatype((key, func(val)) for key, val in collection.items())
    if is_string(collection):
        # Strings are iterable but treated as scalars here.
        return collection
    elif isinstance(collection, Iterable):
        return datatype(map(func, collection))
    else:
        return collection
@reject_recursive_repeats
def recursive_map(func, data):
    """
    Apply func to data, and any collection items inside data (using map_collection).
    Define func so that it only applies to the type of value that you want it to apply to.
    """
    # Descend first, then apply func to the (already-mapped) result.
    items_mapped = map_collection(lambda item: recursive_map(func, item), data)
    return func(items_mapped)
def static_return(value):
    """Return a callable that ignores all arguments and always returns *value*."""
    def _constant(*_args, **_kwargs):
        return value
    return _constant
def static_result(value):
    """Return a callable that ignores all arguments and returns {'result': value}."""
    def _constant_result(*_args, **_kwargs):
        return {'result': value}
    return _constant_result
@curry
@to_dict
def apply_key_map(key_mappings, value):
    """Rename dict keys according to *key_mappings*; unmapped keys pass through."""
    for key, item in value.items():
        yield key_mappings.get(key, key), item
def is_array_of_strings(value):
    """True when *value* is a list-like container holding only strings."""
    return is_list_like(value) and all(is_string(item) for item in value)
def is_array_of_dicts(value):
    """True when *value* is a list-like container holding only dicts."""
    return is_list_like(value) and all(is_dict(item) for item in value)
@curry
def remove_key_if(key, remove_if, input_dict):
    """Return *input_dict* without *key* when present and *remove_if(input_dict)* holds."""
    if key not in input_dict:
        return input_dict
    if not remove_if(input_dict):
        return input_dict
    return dissoc(input_dict, key)
from abc import (
ABC,
abstractmethod,
)
import itertools
from eth_abi import (
decode_abi,
decode_single,
encode_single,
)
from eth_utils import (
encode_hex,
event_abi_to_log_topic,
is_list_like,
keccak,
to_dict,
to_hex,
to_tuple,
)
import web3
from web3._utils.encoding import (
encode_single_packed,
hexstr_if_str,
to_bytes,
)
from web3._utils.formatters import (
apply_formatter_if,
)
from web3._utils.normalizers import (
BASE_RETURN_NORMALIZERS,
)
from web3._utils.toolz import (
complement,
compose,
cons,
curry,
valfilter,
)
from web3.datastructures import (
AttributeDict,
)
from web3.exceptions import (
MismatchedABI,
)
from .abi import (
exclude_indexed_event_inputs,
get_abi_input_names,
get_indexed_event_inputs,
map_abi_data,
normalize_event_input_types,
process_type,
)
def _normalize_event_arg_options(event_abi, arguments):
    """Normalize user-supplied event arguments to {name: [option, ...]}.

    Accepts None, a positional list/tuple (must match the number of event
    inputs), or a mapping of name -> value/values.
    """
    if arguments is None:
        arguments = {}
    if isinstance(arguments, (list, tuple)):
        if len(arguments) != len(event_abi['inputs']):
            raise ValueError(
                "When passing an argument list, the number of arguments must "
                "match the event constructor."
            )
        arguments = {
            arg['name']: [arg_value]
            for arg, arg_value
            in zip(event_abi['inputs'], arguments)
        }
    return {
        key: value if is_list_like(value) else [value]
        for key, value in arguments.items()
    }


def _encode_argument_options(abi_inputs, normalized_args):
    """ABI-encode each supplied option per input; None stays None (wildcard)."""
    return [
        [
            None if option is None else encode_hex(encode_single(arg['type'], option))
            for option in normalized_args.get(arg['name'], [None])
        ]
        for arg in abi_inputs
    ]


def construct_event_topic_set(event_abi, arguments=None):
    """Build the eth_getLogs topics list for *event_abi*, filtered by *arguments*."""
    normalized_args = _normalize_event_arg_options(event_abi, arguments)
    event_topic = encode_hex(event_abi_to_log_topic(event_abi))
    encoded_args = _encode_argument_options(
        get_indexed_event_inputs(event_abi),
        normalized_args,
    )
    return list(normalize_topic_list([event_topic] + encoded_args))


def construct_event_data_set(event_abi, arguments=None):
    """Build the data-argument match sets for *event_abi*, filtered by *arguments*."""
    normalized_args = _normalize_event_arg_options(event_abi, arguments)
    encoded_args = _encode_argument_options(
        exclude_indexed_event_inputs(event_abi),
        normalized_args,
    )
    # Each permutation of options is a candidate data match; all-wildcard
    # permutations collapse to an empty list.
    return [
        list(permutation) if any(value is not None for value in permutation) else []
        for permutation in itertools.product(*encoded_args)
    ]
def is_dynamic_sized_type(_type):
    """True for ABI types without a fixed encoded size: arrays, string, unsized bytes."""
    base_type, type_size, arrlist = process_type(_type)
    return (
        bool(arrlist) or
        base_type == 'string' or
        (base_type == 'bytes' and type_size == '')
    )
@to_tuple
def get_event_abi_types_for_decoding(event_inputs):
    """
    Event logs use the `sha3(value)` for indexed inputs of type `bytes` or
    `string`. Because of this we need to modify the types so that we can
    decode the log entries using the correct types.
    """
    for input_abi in event_inputs:
        is_hashed = input_abi['indexed'] and is_dynamic_sized_type(input_abi['type'])
        yield 'bytes32' if is_hashed else input_abi['type']
@curry
def get_event_data(event_abi, log_entry):
    """
    Given an event ABI and a log entry for that event, return the decoded
    event data
    """
    # Anonymous events carry no signature topic; otherwise topics[0] must be
    # the event signature hash and the remaining topics hold the indexed args.
    if event_abi['anonymous']:
        log_topics = log_entry['topics']
    elif not log_entry['topics']:
        raise MismatchedABI("Expected non-anonymous event to have 1 or more topics")
    elif event_abi_to_log_topic(event_abi) != log_entry['topics'][0]:
        raise MismatchedABI("The event signature did not match the provided ABI")
    else:
        log_topics = log_entry['topics'][1:]
    # Indexed inputs are decoded from the topics.
    log_topics_abi = get_indexed_event_inputs(event_abi)
    log_topic_normalized_inputs = normalize_event_input_types(log_topics_abi)
    log_topic_types = get_event_abi_types_for_decoding(log_topic_normalized_inputs)
    log_topic_names = get_abi_input_names({'inputs': log_topics_abi})
    if len(log_topics) != len(log_topic_types):
        raise ValueError("Expected {0} log topics. Got {1}".format(
            len(log_topic_types),
            len(log_topics),
        ))
    # Non-indexed inputs are ABI-decoded from the data payload.
    log_data = hexstr_if_str(to_bytes, log_entry['data'])
    log_data_abi = exclude_indexed_event_inputs(event_abi)
    log_data_normalized_inputs = normalize_event_input_types(log_data_abi)
    log_data_types = get_event_abi_types_for_decoding(log_data_normalized_inputs)
    log_data_names = get_abi_input_names({'inputs': log_data_abi})
    # sanity check that there are not name intersections between the topic
    # names and the data argument names.
    duplicate_names = set(log_topic_names).intersection(log_data_names)
    if duplicate_names:
        raise ValueError(
            "Invalid Event ABI: The following argument names are duplicated "
            "between event inputs: '{0}'".format(', '.join(duplicate_names))
        )
    decoded_log_data = decode_abi(log_data_types, log_data)
    normalized_log_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_data_types,
        decoded_log_data
    )
    # Each topic is decoded individually as a single 32-byte word.
    decoded_topic_data = [
        decode_single(topic_type, topic_data)
        for topic_type, topic_data
        in zip(log_topic_types, log_topics)
    ]
    normalized_topic_data = map_abi_data(
        BASE_RETURN_NORMALIZERS,
        log_topic_types,
        decoded_topic_data
    )
    # Merge indexed and non-indexed arguments into a single mapping.
    event_args = dict(itertools.chain(
        zip(log_topic_names, normalized_topic_data),
        zip(log_data_names, normalized_log_data),
    ))
    event_data = {
        'args': event_args,
        'event': event_abi['name'],
        'logIndex': log_entry['logIndex'],
        'transactionIndex': log_entry['transactionIndex'],
        'transactionHash': log_entry['transactionHash'],
        'address': log_entry['address'],
        'blockHash': log_entry['blockHash'],
        'blockNumber': log_entry['blockNumber'],
    }
    return AttributeDict.recursive(event_data)
@to_tuple
def pop_singlets(seq):
    """Unwrap every single-element list-like in *seq*; pass other items through."""
    for item in seq:
        if is_list_like(item) and len(item) == 1:
            yield item[0]
        else:
            yield item
@curry
def remove_trailing_from_seq(seq, remove_value=None):
    """Return *seq* with any trailing run of *remove_value* entries dropped."""
    end = len(seq)
    while end and seq[end - 1] == remove_value:
        end -= 1
    return seq[:end]
# Normalize a topics list for eth_getLogs: unwrap single-element topic
# lists, then strip trailing None wildcards.
normalize_topic_list = compose(
    remove_trailing_from_seq(remove_value=None),
    pop_singlets,)
def is_indexed(arg):
    """Return True when *arg* is a filter for an indexed (topic) event input.

    ``isinstance`` already returns a bool, so the original
    ``isinstance(...) is True`` / ``return True`` / ``return False``
    dance collapses to a direct return.
    """
    return isinstance(arg, TopicArgumentFilter)
# Complement predicate: True for non-topic (data) argument filters.
is_not_indexed = complement(is_indexed)
class EventFilterBuilder:
    """Incrementally builds the parameters of an ``eth_newFilter`` call
    that matches a single contract event.

    Block range, address, and per-argument match values are each
    write-once; after :meth:`deploy` the builder and all of its argument
    filters become immutable.
    """
    formatter = None
    _fromBlock = None
    _toBlock = None
    _address = None
    _immutable = False
    def __init__(self, event_abi, formatter=None):
        self.event_abi = event_abi
        self.formatter = formatter
        self.event_topic = initialize_event_topics(self.event_abi)
        # One argument filter per event input, addressable by name.
        self.args = AttributeDict(
            _build_argument_filters_from_event_abi(event_abi))
        self._ordered_arg_names = tuple(arg['name'] for arg in event_abi['inputs'])
    @property
    def fromBlock(self):
        return self._fromBlock
    @fromBlock.setter
    def fromBlock(self, value):
        # Write-once: only settable while unset and before deploy().
        if self._fromBlock is None and not self._immutable:
            self._fromBlock = value
        else:
            raise ValueError(
                "fromBlock is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self._fromBlock))
    @property
    def toBlock(self):
        return self._toBlock
    @toBlock.setter
    def toBlock(self, value):
        # Write-once: only settable while unset and before deploy().
        if self._toBlock is None and not self._immutable:
            self._toBlock = value
        else:
            raise ValueError(
                "toBlock is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self._toBlock))
    @property
    def address(self):
        return self._address
    @address.setter
    def address(self, value):
        # Write-once: only settable while unset and before deploy().
        if self._address is None and not self._immutable:
            self._address = value
        else:
            raise ValueError(
                "address is already set to {0}. "
                "Resetting filter parameters is not permitted".format(self.address))
    @property
    def ordered_args(self):
        # Argument filters in ABI declaration order.
        return tuple(map(self.args.__getitem__, self._ordered_arg_names))
    @property
    @to_tuple
    def indexed_args(self):
        # Filters for indexed inputs, matched node-side via log topics.
        return tuple(filter(is_indexed, self.ordered_args))
    @property
    @to_tuple
    def data_args(self):
        # Filters for non-indexed inputs, matched client-side against log data.
        return tuple(filter(is_not_indexed, self.ordered_args))
    @property
    def topics(self):
        # Event selector topic first, then one entry per indexed argument.
        arg_topics = tuple(arg.match_values for arg in self.indexed_args)
        return normalize_topic_list(cons(to_hex(self.event_topic), arg_topics))
    @property
    def data_argument_values(self):
        if self.data_args is not None:
            return tuple(arg.match_values for arg in self.data_args)
        else:
            return (None,)
    @property
    def filter_params(self):
        # Drop unset (None) entries so only explicit parameters are sent.
        params = {
            "topics": self.topics,
            "fromBlock": self.fromBlock,
            "toBlock": self.toBlock,
            "address": self.address
        }
        return valfilter(lambda x: x is not None, params)
    def deploy(self, w3):
        """Install the filter via ``w3.eth.filter`` and freeze this builder.

        Returns the installed log filter, wired up with the client-side
        data filters and (if supplied) the log entry formatter.
        Raises ValueError when *w3* is not a Web3 instance.
        """
        if not isinstance(w3, web3.Web3):
            raise ValueError("Invalid web3 argument: got: {0}".format(repr(w3)))
        # Freeze every argument filter and this builder before deployment.
        for arg in self.args.values():
            arg._immutable = True
        self._immutable = True
        log_filter = w3.eth.filter(self.filter_params)
        log_filter.filter_params = self.filter_params
        log_filter.set_data_filters(self.data_argument_values)
        log_filter.builder = self
        if self.formatter is not None:
            log_filter.log_entry_formatter = self.formatter
        return log_filter
def initialize_event_topics(event_abi):
    """Return the event's selector log topic, or an empty list for anonymous events."""
    # Strict `is False` check preserved: only an explicit non-anonymous
    # event gets a selector topic.
    if event_abi['anonymous'] is not False:
        return []
    return event_abi_to_log_topic(event_abi)
@to_dict
def _build_argument_filters_from_event_abi(event_abi):
    """Yield (name, filter) pairs: topic filters for indexed inputs,
    data filters for the rest."""
    for abi_input in event_abi['inputs']:
        if abi_input['indexed'] is True:
            arg_filter = TopicArgumentFilter(arg_type=abi_input['type'])
        else:
            arg_filter = DataArgumentFilter(arg_type=abi_input['type'])
        yield abi_input['name'], arg_filter
# Convert list-like match values to tuples; scalars pass through untouched.
array_to_tuple = apply_formatter_if(is_list_like, tuple)
@to_tuple
def _normalize_match_values(match_values):
    """Canonicalize match values: any list-like value becomes a tuple."""
    yield from map(array_to_tuple, match_values)
class BaseArgumentFilter(ABC):
    """Holds the write-once match values for one event argument."""
    _match_values = None  # tuple of normalized values once set
    _immutable = False    # flipped on deploy(); values are then frozen
    def __init__(self, arg_type):
        self.arg_type = arg_type
    def match_single(self, value):
        """Match exactly one value."""
        self._set_match_values((value,))
    def match_any(self, *values):
        """Match any one of the given values."""
        self._set_match_values(values)
    def _set_match_values(self, values):
        # Shared write-once guard for both match_* entry points.
        if self._immutable:
            raise ValueError("Setting values is forbidden after filter is deployed.")
        if self._match_values is not None:
            raise ValueError("An argument match value/s has already been set.")
        self._match_values = _normalize_match_values(values)
    @property
    @abstractmethod
    def match_values(self):
        pass
class DataArgumentFilter(BaseArgumentFilter):
    """Filter for a non-indexed event input; matched client-side against log data."""
    @property
    def match_values(self):
        # The ABI type travels with the values so log data can be decoded
        # before comparison.
        return (self.arg_type, self._match_values)
class TopicArgumentFilter(BaseArgumentFilter):
    """Filter for an indexed event input; values are encoded as log topics."""
    @to_tuple
    def _get_match_values(self):
        for value in self._match_values:
            yield self._encode(value)
    @property
    def match_values(self):
        if self._match_values is None:
            return None
        return self._get_match_values()
    def _encode(self, value):
        # Dynamically sized types occupy their topic slot as a keccak hash
        # of the packed encoding; static types are encoded directly.
        if is_dynamic_sized_type(self.arg_type):
            return to_hex(keccak(encode_single_packed(self.arg_type, value)))
        return to_hex(encode_single(self.arg_type, value))
import binascii
from collections import (
namedtuple,
)
import itertools
import re
from eth_abi import (
decoding,
encoding,
)
from eth_abi.codec import (
ABICodec,
)
from eth_abi.grammar import (
parse as parse_type_string,
)
from eth_abi.registry import (
BaseEquals,
registry as default_registry,
)
from eth_utils import (
decode_hex,
is_bytes,
is_list_like,
is_text,
to_text,
to_tuple,
)
from eth_utils.abi import (
collapse_if_tuple,
)
from web3._utils.ens import (
is_ens_name,
)
from web3._utils.formatters import (
recursive_map,
)
from web3._utils.toolz import (
curry,
partial,
pipe,
)
from web3.exceptions import (
FallbackNotFound,
)
def filter_by_type(_type, contract_abi):
    """Return every ABI element whose ``type`` field equals *_type*."""
    return list(filter(lambda abi: abi['type'] == _type, contract_abi))
def filter_by_name(name, contract_abi):
    """Return ABI entries whose name matches *name*.

    Fallback and constructor entries are skipped first — they carry no
    ``name`` key, so the type check must run before the name lookup.
    """
    matches = []
    for abi in contract_abi:
        if abi['type'] in ('fallback', 'constructor'):
            continue
        if abi['name'] == name:
            matches.append(abi)
    return matches
def get_abi_input_types(abi):
    """Return the collapsed input type strings; fallback functions have none."""
    if 'inputs' not in abi and abi['type'] == 'fallback':
        return []
    return [collapse_if_tuple(abi_input) for abi_input in abi['inputs']]
def get_abi_output_types(abi):
    """Return the collapsed output type strings; fallback functions have none."""
    if abi['type'] == 'fallback':
        return []
    return [collapse_if_tuple(arg) for arg in abi['outputs']]
def get_abi_input_names(abi):
    """Return the declared input argument names; fallback functions have none."""
    if 'inputs' not in abi and abi['type'] == 'fallback':
        return []
    names = []
    for abi_input in abi['inputs']:
        names.append(abi_input['name'])
    return names
def get_fallback_func_abi(contract_abi):
    """Return the fallback function's ABI entry.

    Raises FallbackNotFound when the contract declares no fallback.
    """
    fallback_abis = filter_by_type('fallback', contract_abi)
    if not fallback_abis:
        raise FallbackNotFound("No fallback function was found in the contract ABI.")
    return fallback_abis[0]
def fallback_func_abi_exists(contract_abi):
    """Truthy (the matching entries) when the contract declares a fallback function."""
    matching = filter_by_type('fallback', contract_abi)
    return matching
def get_indexed_event_inputs(event_abi):
    """Return only the event inputs explicitly flagged ``indexed: True``."""
    return list(filter(lambda arg: arg['indexed'] is True, event_abi['inputs']))
def exclude_indexed_event_inputs(event_abi):
    """Return only the event inputs explicitly flagged ``indexed: False``."""
    return list(filter(lambda arg: arg['indexed'] is False, event_abi['inputs']))
def filter_by_argument_count(num_arguments, contract_abi):
    """Return the ABI entries declaring exactly *num_arguments* inputs."""
    return list(filter(
        lambda abi: len(abi['inputs']) == num_arguments,
        contract_abi,
    ))
def filter_by_argument_name(argument_names, contract_abi):
    """Return ABI entries whose inputs include every name in *argument_names*."""
    wanted = set(argument_names)
    # intersection == wanted  <=>  wanted is a subset of the input names
    return [
        abi for abi in contract_abi
        if wanted.issubset(get_abi_input_names(abi))
    ]
# Compatibility shim: older eth-abi releases expose process_type and
# collapse_type directly; newer releases removed them, so reimplement
# both on top of the grammar module when the import fails.
try:
    from eth_abi.abi import (
        process_type,
        collapse_type,
    )
except ImportError:
    from eth_abi.grammar import (
        normalize as normalize_type_string,
    )
    def process_type(type_str):
        """Split an ABI type string into its (base, sub, arrlist) parts,
        e.g. ``'uint256[2]'`` -> ``('uint', '256', [[2]])``.
        """
        normalized_type_str = normalize_type_string(type_str)
        abi_type = parse_type_string(normalized_type_str)
        abi_type.validate()
        # Array types carry their element type in item_type instead of base.
        if hasattr(abi_type, 'base'):
            base = abi_type.base
        else:
            base = str(abi_type.item_type)
        if hasattr(abi_type, 'sub'):
            sub = abi_type.sub
        else:
            sub = None
        # Normalize sub to a string; a two-part (tuple) sub is rendered as
        # "<m>x<n>", absent subs become ''.
        if isinstance(sub, tuple):
            sub = 'x'.join(map(str, sub))
        elif isinstance(sub, int):
            sub = str(sub)
        else:
            sub = ''
        arrlist = abi_type.arrlist
        if isinstance(arrlist, tuple):
            arrlist = list(map(list, arrlist))
        else:
            arrlist = []
        return base, sub, arrlist
    def collapse_type(base, sub, arrlist):
        """Inverse of process_type: rejoin the parts into one type string."""
        return base + str(sub) + ''.join(map(repr, arrlist))
class AddressEncoder(encoding.AddressEncoder):
    """Address encoder that additionally accepts ENS names.

    ENS names are resolved to addresses elsewhere in the pipeline, so
    they pass validation here unchanged.
    """
    @classmethod
    def validate_value(cls, value):
        if is_ens_name(value):
            return
        super().validate_value(value)
class AcceptsHexStrMixin:
    """Mixin letting byte-oriented encoders accept hex-string input.

    Text values are hex-decoded before delegating to the normal byte
    validation; invalid hex is reported via ``invalidate_value``.
    """
    def validate_value(self, value):
        if is_text(value):
            try:
                value = decode_hex(value)
            except binascii.Error:
                self.invalidate_value(
                    value,
                    msg='invalid hex string',
                )
        super().validate_value(value)
class BytesEncoder(AcceptsHexStrMixin, encoding.BytesEncoder):
    """Fixed-size ``bytes<M>`` encoder that also accepts hex strings."""
    pass
class ByteStringEncoder(AcceptsHexStrMixin, encoding.ByteStringEncoder):
    """Dynamic ``bytes`` encoder that also accepts hex strings."""
    pass
class TextStringEncoder(encoding.TextStringEncoder):
    """String encoder that also accepts UTF-8-decodable byte values."""
    @classmethod
    def validate_value(cls, value):
        if is_bytes(value):
            try:
                value = to_text(value)
            except UnicodeDecodeError:
                cls.invalidate_value(
                    value,
                    msg='not decodable as unicode string',
                )
        super().validate_value(value)
# We make a copy here just to make sure that eth-abi's default registry is not
# affected by our custom encoder subclasses
registry = default_registry.copy()
# Swap the stock coders for the more lenient subclasses defined above.
registry.unregister('address')
registry.unregister('bytes<M>')
registry.unregister('bytes')
registry.unregister('string')
registry.register(
    BaseEquals('address'),
    AddressEncoder, decoding.AddressDecoder,
    label='address',
)
registry.register(
    BaseEquals('bytes', with_sub=True),
    BytesEncoder, decoding.BytesDecoder,
    label='bytes<M>',
)
registry.register(
    BaseEquals('bytes', with_sub=False),
    ByteStringEncoder, decoding.ByteStringDecoder,
    label='bytes',
)
registry.register(
    BaseEquals('string'),
    TextStringEncoder, decoding.StringDecoder,
    label='string',
)
# Module-level codec built on the customized registry.
codec = ABICodec(registry)
is_encodable = codec.is_encodable
def filter_by_encodability(args, kwargs, contract_abi):
    """Return the ABIs whose inputs can encode the given args/kwargs."""
    return [
        fn_abi for fn_abi in contract_abi
        if check_if_arguments_can_be_encoded(fn_abi, args, kwargs)
    ]
def get_abi_inputs(function_abi, arg_values):
    """Similar to get_abi_input_types(), but gets values too.
    Returns a zip of types and their corresponding argument values.
    Importantly, looks in `function_abi` for tuples, and for any found, (a)
    translates them from the ABI dict representation to the parenthesized type
    list representation that's expected by eth_abi, and (b) translates their
    corresponding arguments values from the python dict representation to the
    tuple representation expected by eth_abi.
    >>> get_abi_inputs(
    ...     {
    ...         'inputs': [
    ...             {
    ...                 'components': [
    ...                     {'name': 'anAddress', 'type': 'address'},
    ...                     {'name': 'anInt', 'type': 'uint256'},
    ...                     {'name': 'someBytes', 'type': 'bytes'}
    ...                 ],
    ...                 'name': 'arg',
    ...                 'type': 'tuple'
    ...             }
    ...         ],
    ...         'type': 'function'
    ...     },
    ...     (
    ...         {
    ...             'anInt': 12345,
    ...             'anAddress': '0x0000000000000000000000000000000000000000',
    ...             'someBytes': b'0000',
    ...         },
    ...     ),
    ... )
    (['(address,uint256,bytes)'], (('0x0000000000000000000000000000000000000000', 12345, b'0000'),))
    """
    # ABIs with no inputs (e.g. fallback functions) have no types or values.
    if "inputs" not in function_abi:
        return ([], ())
    def collate_tuple_components(components, values):
        """Collates tuple components with their values.
        :param components: is an array of ABI components, such as one extracted
        from an input element of a function ABI.
        :param values: can be any of a list, tuple, or dict. If a dict, key
        names must correspond to component names specified in the components
        parameter. If a list or array, the order of the elements should
        correspond to the order of elements in the components array.
        Returns a two-element tuple. The first element is a string comprised
        of the parenthesized list of tuple component types. The second element
        is a tuple of the values corresponding to the types in the first
        element.
        >>> collate_tuple_components(
        ...     [
        ...         {'name': 'anAddress', 'type': 'address'},
        ...         {'name': 'anInt', 'type': 'uint256'},
        ...         {'name': 'someBytes', 'type': 'bytes'}
        ...     ],
        ...     (
        ...         {
        ...             'anInt': 12345,
        ...             'anAddress': '0x0000000000000000000000000000000000000000',
        ...             'someBytes': b'0000',
        ...         },
        ...     ),
        ... )
        """
        component_types = []
        component_values = []
        for component, value in zip(components, values):
            component_types.append(component["type"])
            # Dict values are looked up by component name; sequence values
            # are consumed positionally.
            if isinstance(values, dict):
                component_values.append(values[component["name"]])
            elif is_list_like(values):
                component_values.append(value)
            else:
                raise TypeError(
                    "Unknown value type {} for ABI type 'tuple'"
                    .format(type(values))
                )
        return component_types, component_values
    types = []
    values = tuple()
    for abi_input, arg_value in zip(function_abi["inputs"], arg_values):
        if abi_input["type"] == "tuple[]":
            # An array of tuples: collate each element separately, then
            # render the element type once with a trailing '[]'.
            value_array = []
            for arg_arr_elem_val in arg_value:
                component_types, component_values = collate_tuple_components(
                    abi_input["components"], arg_arr_elem_val
                )
                value_array.append(component_values)
            types.append("(" + ",".join(component_types) + ")[]")
            values += (value_array,)
        elif abi_input["type"] == "tuple":
            component_types, component_values = collate_tuple_components(
                abi_input["components"], arg_value
            )
            types.append("(" + ",".join(component_types) + ")")
            values += (tuple(component_values),)
        else:
            # Non-tuple inputs pass through unchanged.
            types.append(abi_input["type"])
            values += (arg_value,)
    return types, values
def check_if_arguments_can_be_encoded(function_abi, args, kwargs):
    """Return True when *args*/*kwargs* can be ABI-encoded for *function_abi*."""
    try:
        aligned_args = merge_args_and_kwargs(function_abi, args, kwargs)
    except TypeError:
        # Wrong count, duplicates, or unknown keyword arguments.
        return False
    if len(function_abi.get('inputs', [])) != len(aligned_args):
        return False
    abi_types, aligned_args = get_abi_inputs(function_abi, aligned_args)
    return all(
        is_encodable(abi_type, arg)
        for abi_type, arg in zip(abi_types, aligned_args)
    )
def merge_args_and_kwargs(function_abi, args, kwargs):
    """
    Takes a list of positional args (``args``) and a dict of keyword args
    (``kwargs``) defining values to be passed to a call to the contract function
    described by ``function_abi``. Checks to ensure that the correct number of
    args were given, no duplicate args were given, and no unknown args were
    given. Returns a list of argument values aligned to the order of inputs
    defined in ``function_abi``.
    """
    # Ensure the function is being applied to the correct number of args.
    # Use .get() in the error message too: an ABI without an 'inputs' key
    # (e.g. a fallback function) previously raised KeyError here instead
    # of the intended TypeError.
    if len(args) + len(kwargs) != len(function_abi.get('inputs', [])):
        raise TypeError(
            "Incorrect argument count. Expected '{0}'. Got '{1}'".format(
                len(function_abi.get('inputs', [])),
                len(args) + len(kwargs),
            )
        )
    # If no keyword args were given, we don't need to align them
    if not kwargs:
        return args
    kwarg_names = set(kwargs.keys())
    sorted_arg_names = tuple(arg_abi['name'] for arg_abi in function_abi['inputs'])
    # Positional args map onto the leading input names.
    args_as_kwargs = dict(zip(sorted_arg_names, args))
    # Check for duplicate args
    duplicate_args = kwarg_names.intersection(args_as_kwargs.keys())
    if duplicate_args:
        raise TypeError(
            "{fn_name}() got multiple values for argument(s) '{dups}'".format(
                fn_name=function_abi['name'],
                dups=', '.join(duplicate_args),
            )
        )
    # Check for unknown args
    unknown_args = kwarg_names.difference(sorted_arg_names)
    if unknown_args:
        if function_abi.get('name'):
            raise TypeError(
                "{fn_name}() got unexpected keyword argument(s) '{dups}'".format(
                    fn_name=function_abi.get('name'),
                    dups=', '.join(unknown_args),
                )
            )
        raise TypeError(
            "Type: '{_type}' got unexpected keyword argument(s) '{dups}'".format(
                _type=function_abi.get('type'),
                dups=', '.join(unknown_args),
            )
        )
    # Sort args according to their position in the ABI and unzip them from their
    # names
    sorted_args = tuple(zip(
        *sorted(
            itertools.chain(kwargs.items(), args_as_kwargs.items()),
            key=lambda kv: sorted_arg_names.index(kv[0]),
        )
    ))
    if sorted_args:
        return sorted_args[1]
    else:
        return tuple()
def get_constructor_abi(contract_abi):
    """Return the constructor's ABI entry, or None when absent.

    Raises ValueError if more than one constructor is declared.
    """
    candidates = [abi for abi in contract_abi if abi['type'] == 'constructor']
    if not candidates:
        return None
    if len(candidates) == 1:
        return candidates[0]
    raise ValueError("Found multiple constructors.")
# Enumerations of every ABI base type string recognized by this module.
DYNAMIC_TYPES = ['bytes', 'string']
INT_SIZES = range(8, 257, 8)
BYTES_SIZES = range(1, 33)
UINT_TYPES = ['uint{0}'.format(i) for i in INT_SIZES]
INT_TYPES = ['int{0}'.format(i) for i in INT_SIZES]
BYTES_TYPES = ['bytes{0}'.format(i) for i in BYTES_SIZES] + ['bytes32.byte']
STATIC_TYPES = list(itertools.chain(
    ['address', 'bool'],
    UINT_TYPES,
    INT_TYPES,
    BYTES_TYPES,
))
# Any static or dynamic base type; the negative lookahead stops e.g.
# 'uint8' from partially matching inside 'uint80'.
BASE_TYPE_REGEX = '|'.join((
    _type + '(?![a-z0-9])'
    for _type
    in itertools.chain(STATIC_TYPES, DYNAMIC_TYPES)
))
# One array suffix: '[]' or '[<n>]'.
SUB_TYPE_REGEX = (
    r'\['
    '[0-9]*'
    r'\]'
)
# A complete type: one base type followed by zero or more array suffixes.
TYPE_REGEX = (
    '^'
    '(?:{base_type})'
    '(?:(?:{sub_type})*)?'
    '$'
).format(
    base_type=BASE_TYPE_REGEX,
    sub_type=SUB_TYPE_REGEX,
)
def is_recognized_type(abi_type):
    """True when *abi_type* matches the supported base/array type grammar."""
    return re.match(TYPE_REGEX, abi_type) is not None
def is_bool_type(abi_type):
    """True only for the exact ``bool`` type string."""
    return abi_type == 'bool'
def is_uint_type(abi_type):
    """True for ``uint8`` .. ``uint256`` (explicit sizes only)."""
    return abi_type in UINT_TYPES
def is_int_type(abi_type):
    """True for ``int8`` .. ``int256`` (explicit sizes only)."""
    return abi_type in INT_TYPES
def is_address_type(abi_type):
    """True only for the exact ``address`` type string."""
    return abi_type == 'address'
def is_bytes_type(abi_type):
    """True for fixed-size ``bytes<M>`` types and the dynamic ``bytes`` type."""
    return abi_type in BYTES_TYPES + ['bytes']
def is_string_type(abi_type):
    """True only for the exact ``string`` type string."""
    return abi_type == 'string'
@curry
def is_length(target_length, value):
    """Curried predicate: True when ``len(value) == target_length``."""
    return len(value) == target_length
def size_of_type(abi_type):
    """
    Returns size in bits of abi_type, or None for unsized/array types.
    """
    # Strings, byte types, and arrays have no single bit size.
    if 'string' in abi_type or 'byte' in abi_type or '[' in abi_type:
        return None
    if abi_type == 'bool':
        return 8
    if abi_type == 'address':
        return 160
    # Remaining types (int/uint) carry their size as the numeric suffix.
    return int(re.sub(r"\D", "", abi_type))
# Matches the final "[...]" suffix of an array type, e.g. "[3]" in "uint256[2][3]".
END_BRACKETS_OF_ARRAY_TYPE_REGEX = r"\[[^]]*\]$"
def sub_type_of_array_type(abi_type):
    """Strip the final ``[...]``, e.g. 'uint256[2][3]' -> 'uint256[2]'."""
    if is_array_type(abi_type):
        return re.sub(END_BRACKETS_OF_ARRAY_TYPE_REGEX, '', abi_type, 1)
    raise ValueError(
        "Cannot parse subtype of nonarray abi-type: {0}".format(abi_type)
    )
def length_of_array_type(abi_type):
    """Return the declared length of the outermost array dimension,
    or None for a dynamic ('[]') array."""
    if not is_array_type(abi_type):
        raise ValueError(
            "Cannot parse length of nonarray abi-type: {0}".format(abi_type)
        )
    bracket_match = re.search(END_BRACKETS_OF_ARRAY_TYPE_REGEX, abi_type)
    inner = bracket_match.group(0).strip("[]")
    return int(inner) if inner else None
# A base type name followed by at least one array suffix, e.g. 'uint256[3][]'.
ARRAY_REGEX = (
    "^"
    "[a-zA-Z0-9_]+"
    "({sub_type})+"
    "$"
).format(sub_type=SUB_TYPE_REGEX)
def is_array_type(abi_type):
    """True when *abi_type* carries at least one array suffix."""
    return re.match(ARRAY_REGEX, abi_type) is not None
# A valid identifier: a letter or underscore, then alphanumerics/underscores.
NAME_REGEX = (
    '[a-zA-Z_]'
    '[a-zA-Z0-9_]*'
)
# "<Library>.<Enum>" -- the shape solc uses for enum types in ABIs.
ENUM_REGEX = (
    '^'
    '{lib_name}'
    r'\.'
    '{enum_name}'
    '$'
).format(lib_name=NAME_REGEX, enum_name=NAME_REGEX)
def is_probably_enum(abi_type):
    """True when *abi_type* looks like a solc library enum ('Lib.Enum')."""
    return re.match(ENUM_REGEX, abi_type) is not None
@to_tuple
def normalize_event_input_types(abi_args):
    """Rewrite probable library-enum types to uint8; pass all others through."""
    for arg in abi_args:
        if not is_recognized_type(arg['type']) and is_probably_enum(arg['type']):
            # Enums are ABI-encoded as uint8; only the 'type' entry changes.
            yield {key: 'uint8' if key == 'type' else val for key, val in arg.items()}
        else:
            yield arg
def abi_to_signature(abi):
    """Render the canonical 'name(type1,type2,...)' signature for an ABI entry."""
    normalized_inputs = normalize_event_input_types(abi.get('inputs', []))
    fn_input_types = ','.join([arg['type'] for arg in normalized_inputs])
    return "{fn_name}({fn_input_types})".format(
        fn_name=abi['name'],
        fn_input_types=fn_input_types,
    )
########################################################
#
# Conditionally modifying data, tagged with ABI Types
#
########################################################
@curry
def map_abi_data(normalizers, types, data):
    """
    Apply each normalizer to ``data`` in the context of the given ``types``.
    Each normalizer is in the format:
    def normalizer(datatype, data):
        # Conditionally modify data
        return (datatype, data)
    Where datatype is a valid ABI type string, like "uint".
    In case of an array, like "bool[2]", normalizer will receive `data`
    as an iterable of typed data, like `[("bool", True), ("bool", False)]`.
    Implementation: decorate the data tree with types, run every
    normalizer over the decorated tree in order, then strip the type
    decorations back out.
    """
    typed_tree = abi_data_tree(types, data)
    for normalizer in normalizers:
        typed_tree = data_tree_map(normalizer, typed_tree)
    return recursive_map(strip_abi_type, typed_tree)
@curry
def abi_data_tree(types, data):
    """
    Decorate the data tree with pairs of (type, data). The pair tuple is actually an
    ABITypedData, but can be accessed as a tuple.
    As an example:
    >>> abi_data_tree(types=["bool[2]", "uint"], data=[[True, False], 0])
    [ABITypedData(abi_type='bool[2]', data=[ABITypedData(abi_type='bool', data=True), ABITypedData(abi_type='bool', data=False)]), ABITypedData(abi_type='uint256', data=0)]
    """  # noqa: E501 (line too long)
    # map() over two iterables pairs each type with its value, like zip.
    return list(map(abi_sub_tree, types, data))
@curry
def data_tree_map(func, data_tree):
    """
    Map func to every ABITypedData element in the tree. func will
    receive two args: abi_type, and data
    """
    def map_to_typed_data(elements):
        # Only typed nodes are mapped; untyped nodes and tuple-typed
        # nodes (abi_type starting with "(") pass through unchanged.
        if not isinstance(elements, ABITypedData):
            return elements
        if elements.abi_type is None:
            return elements
        if isinstance(elements.abi_type, str) and elements.abi_type[0] == "(":
            return elements
        return ABITypedData(func(*elements))
    return recursive_map(map_to_typed_data, data_tree)
class ABITypedData(namedtuple('ABITypedData', 'abi_type, data')):
    """
    An (abi_type, data) pair marking *data* as having a certain ABI type.
    >>> addr1 = "0x" + "0" * 20
    >>> addr2 = "0x" + "f" * 20
    >>> a1 = ABITypedData(['address', addr1])
    >>> a2 = ABITypedData(['address', addr2])
    >>> addrs = ABITypedData(['address[]', [a1, a2]])
    Fields are reachable through the tuple() interface or by attribute:
    >>> assert a1.abi_type == a1[0]
    >>> assert a1.data == a1[1]
    Unlike a typical `namedtuple`, construction takes a single iterable
    positional argument, matching the init interface of the other
    relevant collection types.
    """
    def __new__(cls, iterable):
        return super().__new__(cls, *iterable)
def abi_sub_tree(data_type, data_value):
    """Recursively wrap *data_value* in ABITypedData nodes typed by *data_type*.

    *data_type* may be a type string or an already-processed
    (base, sub, arrlist) triple (the triple form is used for the
    recursive array case below).
    """
    # Tuple types pass through whole; their components are not decorated.
    if (
        isinstance(data_type, str) and
        data_type[0] == "(" and
        isinstance(data_value, tuple)
    ):
        return ABITypedData([data_type, data_value])
    if data_type is None:
        return ABITypedData([None, data_value])
    # Accept either the processed triple or a raw type string.
    # NOTE(review): a 3-character type string (e.g. 'int') would unpack
    # here without raising ValueError -- assumes callers pass canonical
    # type strings or triples; confirm against callers.
    try:
        base, sub, arrlist = data_type
    except ValueError:
        base, sub, arrlist = process_type(data_type)
    collapsed = collapse_type(base, sub, arrlist)
    if arrlist:
        # Peel one array dimension and recurse into each element.
        sub_type = (base, sub, arrlist[:-1])
        return ABITypedData([
            collapsed,
            [
                abi_sub_tree(sub_type, sub_value)
                for sub_value in data_value
            ],
        ])
    else:
        return ABITypedData([collapsed, data_value])
def strip_abi_type(elements):
    """Return the raw data from an ABITypedData node; pass other values through."""
    return elements.data if isinstance(elements, ABITypedData) else elements
import pytest
from eth_abi import (
decode_single,
)
from eth_utils import (
is_boolean,
is_bytes,
is_checksum_address,
is_dict,
is_integer,
is_list_like,
is_same_address,
is_string,
)
from hexbytes import (
HexBytes,
)
from web3.exceptions import (
InvalidAddress,
)
# Fixtures for negative lookups: an address and a hash assumed absent
# from the chains under test.
UNKNOWN_ADDRESS = '0xdEADBEeF00000000000000000000000000000000'
UNKNOWN_HASH = '0xdeadbeef00000000000000000000000000000000000000000000000000000000'
class EthModuleTest:
def test_eth_protocolVersion(self, web3):
protocol_version = web3.version.ethereum
assert is_string(protocol_version)
assert protocol_version.isdigit()
def test_eth_syncing(self, web3):
syncing = web3.eth.syncing
assert is_boolean(syncing) or is_dict(syncing)
if is_boolean(syncing):
assert syncing is False
elif is_dict(syncing):
assert 'startingBlock' in syncing
assert 'currentBlock' in syncing
assert 'highestBlock' in syncing
assert is_integer(syncing['startingBlock'])
assert is_integer(syncing['currentBlock'])
assert is_integer(syncing['highestBlock'])
def test_eth_coinbase(self, web3):
coinbase = web3.eth.coinbase
assert is_checksum_address(coinbase)
def test_eth_mining(self, web3):
mining = web3.eth.mining
assert is_boolean(mining)
def test_eth_hashrate(self, web3):
hashrate = web3.eth.hashrate
assert is_integer(hashrate)
assert hashrate >= 0
def test_eth_gasPrice(self, web3):
gas_price = web3.eth.gasPrice
assert is_integer(gas_price)
assert gas_price > 0
def test_eth_accounts(self, web3):
accounts = web3.eth.accounts
assert is_list_like(accounts)
assert len(accounts) != 0
assert all((
is_checksum_address(account)
for account
in accounts
))
assert web3.eth.coinbase in accounts
def test_eth_blockNumber(self, web3):
block_number = web3.eth.blockNumber
assert is_integer(block_number)
assert block_number >= 0
def test_eth_getBalance(self, web3):
coinbase = web3.eth.coinbase
with pytest.raises(InvalidAddress):
web3.eth.getBalance(coinbase.lower())
balance = web3.eth.getBalance(coinbase)
assert is_integer(balance)
assert balance >= 0
def test_eth_getStorageAt(self, web3, emitter_contract_address):
storage = web3.eth.getStorageAt(emitter_contract_address, 0)
assert isinstance(storage, HexBytes)
def test_eth_getStorageAt_invalid_address(self, web3):
coinbase = web3.eth.coinbase
with pytest.raises(InvalidAddress):
web3.eth.getStorageAt(coinbase.lower(), 0)
def test_eth_getTransactionCount(self, web3, unlocked_account_dual_type):
transaction_count = web3.eth.getTransactionCount(unlocked_account_dual_type)
assert is_integer(transaction_count)
assert transaction_count >= 0
def test_eth_getTransactionCount_invalid_address(self, web3):
coinbase = web3.eth.coinbase
with pytest.raises(InvalidAddress):
web3.eth.getTransactionCount(coinbase.lower())
def test_eth_getBlockTransactionCountByHash_empty_block(self, web3, empty_block):
transaction_count = web3.eth.getBlockTransactionCount(empty_block['hash'])
assert is_integer(transaction_count)
assert transaction_count == 0
def test_eth_getBlockTransactionCountByNumber_empty_block(self, web3, empty_block):
transaction_count = web3.eth.getBlockTransactionCount(empty_block['number'])
assert is_integer(transaction_count)
assert transaction_count == 0
def test_eth_getBlockTransactionCountByHash_block_with_txn(self, web3, block_with_txn):
transaction_count = web3.eth.getBlockTransactionCount(block_with_txn['hash'])
assert is_integer(transaction_count)
assert transaction_count >= 1
def test_eth_getBlockTransactionCountByNumber_block_with_txn(self, web3, block_with_txn):
transaction_count = web3.eth.getBlockTransactionCount(block_with_txn['number'])
assert is_integer(transaction_count)
assert transaction_count >= 1
def test_eth_getUncleCountByBlockHash(self, web3, empty_block):
uncle_count = web3.eth.getUncleCount(empty_block['hash'])
assert is_integer(uncle_count)
assert uncle_count == 0
def test_eth_getUncleCountByBlockNumber(self, web3, empty_block):
uncle_count = web3.eth.getUncleCount(empty_block['number'])
assert is_integer(uncle_count)
assert uncle_count == 0
def test_eth_getCode(self, web3, math_contract_address):
code = web3.eth.getCode(math_contract_address)
assert isinstance(code, HexBytes)
assert len(code) > 0
def test_eth_getCode_invalid_address(self, web3, math_contract):
with pytest.raises(InvalidAddress):
web3.eth.getCode(math_contract.address.lower())
def test_eth_getCode_with_block_identifier(self, web3, emitter_contract):
code = web3.eth.getCode(emitter_contract.address, block_identifier=web3.eth.blockNumber)
assert isinstance(code, HexBytes)
assert len(code) > 0
def test_eth_sign(self, web3, unlocked_account_dual_type):
signature = web3.eth.sign(
unlocked_account_dual_type, text='Message tö sign. Longer than hash!'
)
assert is_bytes(signature)
assert len(signature) == 32 + 32 + 1
# test other formats
hexsign = web3.eth.sign(
unlocked_account_dual_type,
hexstr='0x4d6573736167652074c3b6207369676e2e204c6f6e676572207468616e206861736821'
)
assert hexsign == signature
intsign = web3.eth.sign(
unlocked_account_dual_type,
0x4d6573736167652074c3b6207369676e2e204c6f6e676572207468616e206861736821
)
assert intsign == signature
bytessign = web3.eth.sign(
unlocked_account_dual_type, b'Message t\xc3\xb6 sign. Longer than hash!'
)
assert bytessign == signature
new_signature = web3.eth.sign(
unlocked_account_dual_type, text='different message is different'
)
assert new_signature != signature
def test_eth_sendTransaction_addr_checksum_required(self, web3, unlocked_account):
non_checksum_addr = unlocked_account.lower()
txn_params = {
'from': unlocked_account,
'to': unlocked_account,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
with pytest.raises(InvalidAddress):
invalid_params = dict(txn_params, **{'from': non_checksum_addr})
web3.eth.sendTransaction(invalid_params)
with pytest.raises(InvalidAddress):
invalid_params = dict(txn_params, **{'to': non_checksum_addr})
web3.eth.sendTransaction(invalid_params)
def test_eth_sendTransaction(self, web3, unlocked_account_dual_type):
txn_params = {
'from': unlocked_account_dual_type,
'to': unlocked_account_dual_type,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn = web3.eth.getTransaction(txn_hash)
assert is_same_address(txn['from'], txn_params['from'])
assert is_same_address(txn['to'], txn_params['to'])
assert txn['value'] == 1
assert txn['gas'] == 21000
assert txn['gasPrice'] == txn_params['gasPrice']
def test_eth_sendTransaction_with_nonce(self, web3, unlocked_account):
txn_params = {
'from': unlocked_account,
'to': unlocked_account,
'value': 1,
'gas': 21000,
# Increased gas price to ensure transaction hash different from other tests
'gasPrice': web3.eth.gasPrice * 2,
'nonce': web3.eth.getTransactionCount(unlocked_account),
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn = web3.eth.getTransaction(txn_hash)
assert is_same_address(txn['from'], txn_params['from'])
assert is_same_address(txn['to'], txn_params['to'])
assert txn['value'] == 1
assert txn['gas'] == 21000
assert txn['gasPrice'] == txn_params['gasPrice']
assert txn['nonce'] == txn_params['nonce']
def test_eth_replaceTransaction(self, web3, unlocked_account_dual_type):
txn_params = {
'from': unlocked_account_dual_type,
'to': unlocked_account_dual_type,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn_params['gasPrice'] = web3.eth.gasPrice * 2
replace_txn_hash = web3.eth.replaceTransaction(txn_hash, txn_params)
replace_txn = web3.eth.getTransaction(replace_txn_hash)
assert is_same_address(replace_txn['from'], txn_params['from'])
assert is_same_address(replace_txn['to'], txn_params['to'])
assert replace_txn['value'] == 1
assert replace_txn['gas'] == 21000
assert replace_txn['gasPrice'] == txn_params['gasPrice']
def test_eth_replaceTransaction_non_existing_transaction(
self, web3, unlocked_account_dual_type):
txn_params = {
'from': unlocked_account_dual_type,
'to': unlocked_account_dual_type,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
with pytest.raises(ValueError):
web3.eth.replaceTransaction(
'0x98e8cc09b311583c5079fa600f6c2a3bea8611af168c52e4b60b5b243a441997',
txn_params
)
# auto mine is enabled for this test
def test_eth_replaceTransaction_already_mined(self, web3, unlocked_account_dual_type):
txn_params = {
'from': unlocked_account_dual_type,
'to': unlocked_account_dual_type,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn_params['gasPrice'] = web3.eth.gasPrice * 2
with pytest.raises(ValueError):
web3.eth.replaceTransaction(txn_hash, txn_params)
def test_eth_replaceTransaction_incorrect_nonce(self, web3, unlocked_account):
txn_params = {
'from': unlocked_account,
'to': unlocked_account,
'value': 1,
'gas': 21000,
'gasPrice': web3.eth.gasPrice,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn = web3.eth.getTransaction(txn_hash)
txn_params['gasPrice'] = web3.eth.gasPrice * 2
txn_params['nonce'] = txn['nonce'] + 1
with pytest.raises(ValueError):
web3.eth.replaceTransaction(txn_hash, txn_params)
def test_eth_replaceTransaction_gas_price_too_low(self, web3, unlocked_account_dual_type):
txn_params = {
'from': unlocked_account_dual_type,
'to': unlocked_account_dual_type,
'value': 1,
'gas': 21000,
'gasPrice': 10,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn_params['gasPrice'] = 9
with pytest.raises(ValueError):
web3.eth.replaceTransaction(txn_hash, txn_params)
def test_eth_replaceTransaction_gas_price_defaulting_minimum(self, web3, unlocked_account):
txn_params = {
'from': unlocked_account,
'to': unlocked_account,
'value': 1,
'gas': 21000,
'gasPrice': 10,
}
txn_hash = web3.eth.sendTransaction(txn_params)
txn_params.pop('gasPrice')
replace_txn_hash = web3.eth.replaceTransaction(txn_hash, txn_params)
replace_txn = web3.eth.getTransaction(replace_txn_hash)
assert replace_txn['gasPrice'] == 11 # minimum gas price
    def test_eth_replaceTransaction_gas_price_defaulting_strategy_higher(self,
                                                                         web3,
                                                                         unlocked_account):
        """A gas price strategy above the minimum replacement price wins."""
        txn_params = {
            'from': unlocked_account,
            'to': unlocked_account,
            'value': 1,
            'gas': 21000,
            'gasPrice': 10,
        }
        txn_hash = web3.eth.sendTransaction(txn_params)

        def higher_gas_price_strategy(web3, txn):
            # Strategy result (20) exceeds the minimum replacement price (11).
            return 20
        web3.eth.setGasPriceStrategy(higher_gas_price_strategy)
        txn_params.pop('gasPrice')
        replace_txn_hash = web3.eth.replaceTransaction(txn_hash, txn_params)
        replace_txn = web3.eth.getTransaction(replace_txn_hash)
        assert replace_txn['gasPrice'] == 20  # Strategy provides higher gas price
    def test_eth_replaceTransaction_gas_price_defaulting_strategy_lower(self,
                                                                        web3,
                                                                        unlocked_account):
        """A gas price strategy below the minimum replacement price is overridden."""
        txn_params = {
            'from': unlocked_account,
            'to': unlocked_account,
            'value': 1,
            'gas': 21000,
            'gasPrice': 10,
        }
        txn_hash = web3.eth.sendTransaction(txn_params)

        def lower_gas_price_strategy(web3, txn):
            # Strategy result (5) is below the minimum replacement price (11).
            return 5
        web3.eth.setGasPriceStrategy(lower_gas_price_strategy)
        txn_params.pop('gasPrice')
        replace_txn_hash = web3.eth.replaceTransaction(txn_hash, txn_params)
        replace_txn = web3.eth.getTransaction(replace_txn_hash)
        # Strategy provides lower gas price - minimum preferred
        assert replace_txn['gasPrice'] == 11
    def test_eth_modifyTransaction(self, web3, unlocked_account):
        """modifyTransaction overrides the given fields and keeps the rest intact."""
        txn_params = {
            'from': unlocked_account,
            'to': unlocked_account,
            'value': 1,
            'gas': 21000,
            'gasPrice': web3.eth.gasPrice,
        }
        txn_hash = web3.eth.sendTransaction(txn_params)
        modified_txn_hash = web3.eth.modifyTransaction(
            txn_hash, gasPrice=(txn_params['gasPrice'] * 2), value=2
        )
        modified_txn = web3.eth.getTransaction(modified_txn_hash)
        # Unmodified fields are preserved; gasPrice and value are replaced.
        assert is_same_address(modified_txn['from'], txn_params['from'])
        assert is_same_address(modified_txn['to'], txn_params['to'])
        assert modified_txn['value'] == 2
        assert modified_txn['gas'] == 21000
        assert modified_txn['gasPrice'] == txn_params['gasPrice'] * 2
    @pytest.mark.parametrize(
        'raw_transaction, expected_hash',
        [
            (
                # address 0x39EEed73fb1D3855E90Cbd42f348b3D7b340aAA6
                '0xf8648085174876e8008252089439eeed73fb1d3855e90cbd42f348b3d7b340aaa601801ba0ec1295f00936acd0c2cb90ab2cdaacb8bf5e11b3d9957833595aca9ceedb7aada05dfc8937baec0e26029057abd3a1ef8c505dca2cdc07ffacb046d090d2bea06a',  # noqa: E501
                '0x1f80f8ab5f12a45be218f76404bda64d37270a6f4f86ededd0eb599f80548c13',
            ),
            (
                # private key 0x3c2ab4e8f17a7dea191b8c991522660126d681039509dc3bb31af7c9bdb63518
                # This is an unfunded account, but the transaction has a 0 gas price, so is valid.
                # It never needs to be mined, we just want the transaction hash back to confirm.
                HexBytes('0xf85f808082c35094d898d5e829717c72e7438bad593076686d7d164a80801ba005c2e99ecee98a12fbf28ab9577423f42e9e88f2291b3acc8228de743884c874a077d6bc77a47ad41ec85c96aac2ad27f05a039c4787fca8a1e5ee2d8c7ec1bb6a'),  # noqa: E501
                '0x98eeadb99454427f6aad7b558bac13e9d225512a6f5e5c11cf48e8d4067e51b5',
            ),
        ]
    )
    def test_eth_sendRawTransaction(self,
                                    web3,
                                    raw_transaction,
                                    funded_account_for_raw_txn,
                                    expected_hash):
        """Broadcasting a pre-signed raw txn (hex str or bytes) returns its hash."""
        txn_hash = web3.eth.sendRawTransaction(raw_transaction)
        assert txn_hash == web3.toBytes(hexstr=expected_hash)
    def test_eth_call(self, web3, math_contract):
        """eth_call on the math contract's `add` returns the ABI-encoded sum."""
        coinbase = web3.eth.coinbase
        txn_params = math_contract._prepare_transaction(
            fn_name='add',
            fn_args=(7, 11),
            transaction={'from': coinbase, 'to': math_contract.address},
        )
        call_result = web3.eth.call(txn_params)
        assert is_string(call_result)
        result = decode_single('uint256', call_result)
        assert result == 18
    def test_eth_call_with_0_result(self, web3, math_contract):
        """eth_call correctly decodes a zero-valued return (not empty/None)."""
        coinbase = web3.eth.coinbase
        txn_params = math_contract._prepare_transaction(
            fn_name='add',
            fn_args=(0, 0),
            transaction={'from': coinbase, 'to': math_contract.address},
        )
        call_result = web3.eth.call(txn_params)
        assert is_string(call_result)
        result = decode_single('uint256', call_result)
        assert result == 0
    def test_eth_estimateGas(self, web3, unlocked_account_dual_type):
        """estimateGas returns a positive integer for a simple value transfer."""
        gas_estimate = web3.eth.estimateGas({
            'from': unlocked_account_dual_type,
            'to': unlocked_account_dual_type,
            'value': 1,
        })
        assert is_integer(gas_estimate)
        assert gas_estimate > 0
    def test_eth_estimateGas_with_block(self,
                                        web3,
                                        unlocked_account_dual_type):
        """estimateGas also accepts an explicit block identifier argument."""
        gas_estimate = web3.eth.estimateGas({
            'from': unlocked_account_dual_type,
            'to': unlocked_account_dual_type,
            'value': 1,
        }, 'latest')
        assert is_integer(gas_estimate)
        assert gas_estimate > 0
    def test_eth_getBlockByHash(self, web3, empty_block):
        """getBlock resolves a block by its hash."""
        block = web3.eth.getBlock(empty_block['hash'])
        assert block['hash'] == empty_block['hash']

    def test_eth_getBlockByHash_not_found(self, web3, empty_block):
        """getBlock returns None for an unknown block hash."""
        block = web3.eth.getBlock(UNKNOWN_HASH)
        assert block is None

    def test_eth_getBlockByNumber_with_integer(self, web3, empty_block):
        """getBlock resolves a block by its integer number."""
        block = web3.eth.getBlock(empty_block['number'])
        assert block['number'] == empty_block['number']

    def test_eth_getBlockByNumber_latest(self, web3, empty_block):
        """'latest' resolves to the current head block."""
        current_block_number = web3.eth.blockNumber
        block = web3.eth.getBlock('latest')
        assert block['number'] == current_block_number

    def test_eth_getBlockByNumber_not_found(self, web3, empty_block):
        """getBlock returns None for a block number beyond the chain head."""
        block = web3.eth.getBlock(12345)
        assert block is None

    def test_eth_getBlockByNumber_pending(self, web3, empty_block):
        """'pending' resolves to one past the current head block."""
        current_block_number = web3.eth.blockNumber
        block = web3.eth.getBlock('pending')
        assert block['number'] == current_block_number + 1

    def test_eth_getBlockByNumber_earliest(self, web3, empty_block):
        """'earliest' resolves to the genesis block (number 0)."""
        genesis_block = web3.eth.getBlock(0)
        block = web3.eth.getBlock('earliest')
        assert block['number'] == 0
        assert block['hash'] == genesis_block['hash']

    def test_eth_getBlockByNumber_full_transactions(self, web3, block_with_txn):
        """full_transactions=True inlines transaction objects, not just hashes."""
        block = web3.eth.getBlock(block_with_txn['number'], True)
        transaction = block['transactions'][0]
        assert transaction['hash'] == block_with_txn['transactions'][0]
    def test_eth_getTransactionByHash(self, web3, mined_txn_hash):
        """getTransaction resolves a mined transaction by hash."""
        transaction = web3.eth.getTransaction(mined_txn_hash)
        assert is_dict(transaction)
        assert transaction['hash'] == HexBytes(mined_txn_hash)

    def test_eth_getTransactionByHash_contract_creation(self,
                                                        web3,
                                                        math_contract_deploy_txn_hash):
        """A contract-creation transaction has a None `to` field."""
        transaction = web3.eth.getTransaction(math_contract_deploy_txn_hash)
        assert is_dict(transaction)
        assert transaction['to'] is None, "to field is %r" % transaction['to']

    def test_eth_getTransactionFromBlockHashAndIndex(self, web3, block_with_txn, mined_txn_hash):
        """getTransactionFromBlock accepts a block hash plus txn index."""
        transaction = web3.eth.getTransactionFromBlock(block_with_txn['hash'], 0)
        assert is_dict(transaction)
        assert transaction['hash'] == HexBytes(mined_txn_hash)

    def test_eth_getTransactionFromBlockNumberAndIndex(self, web3, block_with_txn, mined_txn_hash):
        """getTransactionFromBlock accepts a block number plus txn index."""
        transaction = web3.eth.getTransactionFromBlock(block_with_txn['number'], 0)
        assert is_dict(transaction)
        assert transaction['hash'] == HexBytes(mined_txn_hash)

    def test_eth_getTransactionByBlockHashAndIndex(self, web3, block_with_txn, mined_txn_hash):
        """getTransactionByBlock accepts a block hash plus txn index."""
        transaction = web3.eth.getTransactionByBlock(block_with_txn['hash'], 0)
        assert is_dict(transaction)
        assert transaction['hash'] == HexBytes(mined_txn_hash)

    def test_eth_getTransactionByBlockNumberAndIndex(self, web3, block_with_txn, mined_txn_hash):
        """getTransactionByBlock accepts a block number plus txn index."""
        transaction = web3.eth.getTransactionByBlock(block_with_txn['number'], 0)
        assert is_dict(transaction)
        assert transaction['hash'] == HexBytes(mined_txn_hash)
    def test_eth_getTransactionReceipt_mined(self, web3, block_with_txn, mined_txn_hash):
        """A mined transaction's receipt carries the block and index metadata."""
        receipt = web3.eth.getTransactionReceipt(mined_txn_hash)
        assert is_dict(receipt)
        assert receipt['blockNumber'] == block_with_txn['number']
        assert receipt['blockHash'] == block_with_txn['hash']
        assert receipt['transactionIndex'] == 0
        assert receipt['transactionHash'] == HexBytes(mined_txn_hash)

    def test_eth_getTransactionReceipt_unmined(self, web3, unlocked_account_dual_type):
        """An unmined transaction has no receipt yet (None)."""
        txn_hash = web3.eth.sendTransaction({
            'from': unlocked_account_dual_type,
            'to': unlocked_account_dual_type,
            'value': 1,
            'gas': 21000,
            'gasPrice': web3.eth.gasPrice,
        })
        receipt = web3.eth.getTransactionReceipt(txn_hash)
        assert receipt is None

    def test_eth_getTransactionReceipt_with_log_entry(self,
                                                      web3,
                                                      block_with_txn_with_log,
                                                      emitter_contract,
                                                      txn_hash_with_log):
        """The receipt's logs reference the emitting contract and its block/txn."""
        receipt = web3.eth.getTransactionReceipt(txn_hash_with_log)
        assert is_dict(receipt)
        assert receipt['blockNumber'] == block_with_txn_with_log['number']
        assert receipt['blockHash'] == block_with_txn_with_log['hash']
        assert receipt['transactionIndex'] == 0
        assert receipt['transactionHash'] == HexBytes(txn_hash_with_log)
        assert len(receipt['logs']) == 1
        log_entry = receipt['logs'][0]
        assert log_entry['blockNumber'] == block_with_txn_with_log['number']
        assert log_entry['blockHash'] == block_with_txn_with_log['hash']
        assert log_entry['logIndex'] == 0
        assert is_same_address(log_entry['address'], emitter_contract.address)
        assert log_entry['transactionIndex'] == 0
        assert log_entry['transactionHash'] == HexBytes(txn_hash_with_log)
    def test_eth_getUncleByBlockHashAndIndex(self, web3):
        """Placeholder: uncle fixtures are not yet producible in the test chain."""
        # TODO: how do we make uncles....
        pass

    def test_eth_getUncleByBlockNumberAndIndex(self, web3):
        """Placeholder: uncle fixtures are not yet producible in the test chain."""
        # TODO: how do we make uncles....
        pass

    def test_eth_getCompilers(self, web3):
        """Placeholder: compiler RPC coverage is undecided."""
        # TODO: do we want to test this?
        pass

    def test_eth_compileSolidity(self, web3):
        """Placeholder: compiler RPC coverage is undecided."""
        # TODO: do we want to test this?
        pass

    def test_eth_compileLLL(self, web3):
        """Placeholder: compiler RPC coverage is undecided."""
        # TODO: do we want to test this?
        pass

    def test_eth_compileSerpent(self, web3):
        """Placeholder: compiler RPC coverage is undecided."""
        # TODO: do we want to test this?
        pass
    def test_eth_newFilter(self, web3):
        """A fresh log filter starts with no changes/logs and can be uninstalled."""
        filter = web3.eth.filter({})

        changes = web3.eth.getFilterChanges(filter.filter_id)
        assert is_list_like(changes)
        assert not changes

        logs = web3.eth.getFilterLogs(filter.filter_id)
        assert is_list_like(logs)
        assert not logs

        result = web3.eth.uninstallFilter(filter.filter_id)
        assert result is True

    def test_eth_newBlockFilter(self, web3):
        """A 'latest' (new-block) filter starts empty and can be uninstalled."""
        filter = web3.eth.filter('latest')
        assert is_string(filter.filter_id)

        changes = web3.eth.getFilterChanges(filter.filter_id)
        assert is_list_like(changes)
        assert not changes

        # TODO: figure out why this fails in go-ethereum
        # logs = web3.eth.getFilterLogs(filter.filter_id)
        # assert is_list_like(logs)
        # assert not logs

        result = web3.eth.uninstallFilter(filter.filter_id)
        assert result is True

    def test_eth_newPendingTransactionFilter(self, web3):
        """A 'pending' transaction filter starts empty and can be uninstalled."""
        filter = web3.eth.filter('pending')
        assert is_string(filter.filter_id)

        changes = web3.eth.getFilterChanges(filter.filter_id)
        assert is_list_like(changes)
        assert not changes

        # TODO: figure out why this fails in go-ethereum
        # logs = web3.eth.getFilterLogs(filter.filter_id)
        # assert is_list_like(logs)
        # assert not logs

        result = web3.eth.uninstallFilter(filter.filter_id)
        assert result is True
    def test_eth_getLogs_without_logs(self, web3, block_with_txn_with_log):
        """Non-matching ranges and addresses yield an empty getLogs result."""
        # Test with block range

        # Range ends just before the block that holds the log.
        filter_params = {
            "fromBlock": 0,
            "toBlock": block_with_txn_with_log['number'] - 1,
        }
        result = web3.eth.getLogs(filter_params)
        assert len(result) == 0

        # the range is wrong (fromBlock > toBlock)
        filter_params = {
            "fromBlock": block_with_txn_with_log['number'],
            "toBlock": block_with_txn_with_log['number'] - 1,
        }
        result = web3.eth.getLogs(filter_params)
        assert len(result) == 0

        # Test with `address`

        # filter with other address
        filter_params = {
            "fromBlock": 0,
            "address": UNKNOWN_ADDRESS,
        }
        result = web3.eth.getLogs(filter_params)
        assert len(result) == 0

        # Test with multiple `address`

        # filter with other address
        filter_params = {
            "fromBlock": 0,
            "address": [UNKNOWN_ADDRESS, UNKNOWN_ADDRESS],
        }
        result = web3.eth.getLogs(filter_params)
        assert len(result) == 0
def test_eth_getLogs_with_logs(
self,
web3,
block_with_txn_with_log,
emitter_contract_address,
txn_hash_with_log):
def assert_contains_log(result):
assert len(result) == 1
log_entry = result[0]
assert log_entry['blockNumber'] == block_with_txn_with_log['number']
assert log_entry['blockHash'] == block_with_txn_with_log['hash']
assert log_entry['logIndex'] == 0
assert is_same_address(log_entry['address'], emitter_contract_address)
assert log_entry['transactionIndex'] == 0
assert log_entry['transactionHash'] == HexBytes(txn_hash_with_log)
# Test with block range
# the range includes the block where the log resides in
filter_params = {
"fromBlock": block_with_txn_with_log['number'],
"toBlock": block_with_txn_with_log['number'],
}
result = web3.eth.getLogs(filter_params)
assert_contains_log(result)
# specify only `from_block`. by default `to_block` should be 'latest'
filter_params = {
"fromBlock": 0,
}
result = web3.eth.getLogs(filter_params)
assert_contains_log(result)
# Test with `address`
# filter with emitter_contract.address
filter_params = {
"fromBlock": 0,
"address": emitter_contract_address,
}
    def test_eth_getLogs_with_logs_topic_args(
            self,
            web3,
            block_with_txn_with_log,
            emitter_contract_address,
            txn_hash_with_log):
        """Topic filters with None wildcards still match the known log."""
        def assert_contains_log(result):
            # The single expected entry must point back at the known block,
            # emitting contract, and transaction.
            assert len(result) == 1
            log_entry = result[0]
            assert log_entry['blockNumber'] == block_with_txn_with_log['number']
            assert log_entry['blockHash'] == block_with_txn_with_log['hash']
            assert log_entry['logIndex'] == 0
            assert is_same_address(log_entry['address'], emitter_contract_address)
            assert log_entry['transactionIndex'] == 0
            assert log_entry['transactionHash'] == HexBytes(txn_hash_with_log)

        # Test with None event sig (wildcard topic0, fixed indexed arg)
        filter_params = {
            "fromBlock": 0,
            "topics": [
                None,
                '0x000000000000000000000000000000000000000000000000000000000000d431'],
        }
        result = web3.eth.getLogs(filter_params)
        assert_contains_log(result)

        # Test with None indexed arg (fixed event sig, wildcard arg)
        filter_params = {
            "fromBlock": 0,
            "topics": [
                '0x057bc32826fbe161da1c110afcdcae7c109a8b69149f727fc37a603c60ef94ca',
                None],
        }
        result = web3.eth.getLogs(filter_params)
        assert_contains_log(result)
    def test_eth_getLogs_with_logs_none_topic_args(
            self,
            web3):
        """More None topics than the log has must match nothing, not everything."""
        # Test with None overflowing
        filter_params = {
            "fromBlock": 0,
            "topics": [None, None, None],
        }
        result = web3.eth.getLogs(filter_params)
        assert len(result) == 0
    def test_eth_call_old_contract_state(self, web3, math_contract, unlocked_account):
        """block_identifier on a call selects pre- vs post-transaction state."""
        start_block = web3.eth.getBlock('latest')
        block_num = start_block.number
        block_hash = start_block.hash

        math_contract.functions.increment().transact({'from': unlocked_account})

        # This isn't an incredibly convincing test since we can't mine, and
        # the default resolved block is latest, So if block_identifier was ignored
        # we would get the same result. For now, we mostly depend on core tests.
        # Ideas to improve this test:
        # - Enable on-demand mining in more clients
        # - Increment the math contract in all of the fixtures, and check the value in an old block
        block_hash_call_result = math_contract.functions.counter().call(block_identifier=block_hash)
        block_num_call_result = math_contract.functions.counter().call(block_identifier=block_num)
        latest_call_result = math_contract.functions.counter().call(block_identifier='latest')
        default_call_result = math_contract.functions.counter().call()
        pending_call_result = math_contract.functions.counter().call(block_identifier='pending')

        # Historic/latest views predate the increment; only 'pending' sees it.
        assert block_hash_call_result == 0
        assert block_num_call_result == 0
        assert latest_call_result == 0
        assert default_call_result == 0

        if pending_call_result != 1:
            raise AssertionError("pending call result was %d instead of 1" % pending_call_result)
def test_eth_uninstallFilter(self, web3):
filter = web3.eth.filter({})
assert is_string(filter.filter_id)
success = web3.eth.uninstallFilter(filter.filter_id)
assert success is True
failure = web3.eth.uninstallFilter(filter.filter_id)
assert failure is False | 0x-web3 | /0x-web3-5.0.0a5.tar.gz/0x-web3-5.0.0a5/web3/_utils/module_testing/eth_module.py | eth_module.py |
# Compiled EVM bytecode for the Emitter test contract (deploy payload); its
# interface is described by EMITTER_ABI below.
EMITTER_BYTECODE = (
    "60606040526104ae806100126000396000f3606060405236156100615760e060020a60003504630b"
    "b563d6811461006357806317c0c1801461013657806320f0256e1461017057806390b41d8b146101"
    "ca5780639c37705314610215578063aa6fd82214610267578063e17bf956146102a9575b005b6020"
    "6004803580820135601f810184900490930260809081016040526060848152610061946024939192"
    "918401918190838280828437509496505050505050507fa95e6e2a182411e7a6f9ed114a85c3761d"
    "87f9b8f453d842c71235aa64fff99f81604051808060200182810382528381815181526020019150"
    "80519060200190808383829060006004602084601f0104600f02600301f150905090810190601f16"
    "80156101255780820380516001836020036101000a031916815260200191505b5092505050604051"
    "80910390a15b50565b610061600435600181141561037a577f1e86022f78f8d04f8e3dfd13a2bdb2"
    "80403e6632877c0dbee5e4eeb259908a5c60006060a1610133565b61006160043560243560443560"
    "64356084356005851415610392576060848152608084815260a084905260c08390527ff039d147f2"
    "3fe975a4254bdf6b1502b8c79132ae1833986b7ccef2638e73fdf991a15b5050505050565b610061"
    "60043560243560443560038314156103d457606082815260808290527fdf0cb1dea99afceb3ea698"
    "d62e705b736f1345a7eee9eb07e63d1f8f556c1bc590604090a15b505050565b6100616004356024"
    "356044356064356004841415610428576060838152608083905260a08290527f4a25b279c7c585f2"
    "5eda9788ac9420ebadae78ca6b206a0e6ab488fd81f550629080a15b50505050565b610061600435"
    "60243560028214156104655760608181527f56d2ef3c5228bf5d88573621e325a4672ab50e033749"
    "a601e4f4a5e1dce905d490602090a15b5050565b60206004803580820135601f8101849004909302"
    "60809081016040526060848152610061946024939192918401918190838280828437509496505050"
    "505050507f532fd6ea96cfb78bb46e09279a26828b8b493de1a2b8b1ee1face527978a15a5816040"
    "51808060200182810382528381815181526020019150805190602001908083838290600060046020"
    "84601f0104600f02600301f150905090810190601f16801561012557808203805160018360200361"
    "01000a03191681526020019150509250505060405180910390a150565b600081141561038d576000"
    "6060a0610133565b610002565b600b85141561038d5760608481526080849052819083907fa30ece"
    "802b64cd2b7e57dabf4010aabf5df26d1556977affb07b98a77ad955b590604090a36101c3565b60"
    "0983141561040f57606082815281907f057bc32826fbe161da1c110afcdcae7c109a8b69149f727f"
    "c37a603c60ef94ca90602090a2610210565b600883141561038d5760608281528190602090a16102"
    "10565b600a84141561038d576060838152819083907ff16c999b533366ca5138d78e85da51611089"
    "cd05749f098d6c225d4cd42ee6ec90602090a3610261565b600782141561049a57807ff70fe689e2"
    "90d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb1560006060a26102a5565b600682"
    "141561038d578060006060a16102a556"
)
# ABI for the Emitter test contract: log* helper functions that fire the
# corresponding Log* events (with indexed / anonymous variants) below.
EMITTER_ABI = [
    {
        "constant": False,
        "inputs": [{"name": "v", "type": "string"}],
        "name": "logString",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [{"name": "which", "type": "uint8"}],
        "name": "logNoArgs",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "which", "type": "uint8"},
            {"name": "arg0", "type": "uint256"},
            {"name": "arg1", "type": "uint256"},
            {"name": "arg2", "type": "uint256"},
            {"name": "arg3", "type": "uint256"},
        ],
        "name": "logQuadruple",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "which", "type": "uint8"},
            {"name": "arg0", "type": "uint256"},
            {"name": "arg1", "type": "uint256"},
        ],
        "name": "logDouble",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "which", "type": "uint8"},
            {"name": "arg0", "type": "uint256"},
            {"name": "arg1", "type": "uint256"},
            {"name": "arg2", "type": "uint256"},
        ],
        "name": "logTriple",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "which", "type": "uint8"},
            {"name": "arg0", "type": "uint256"},
        ],
        "name": "logSingle",
        "outputs": [],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [{"name": "v", "type": "bytes"}],
        "name": "logBytes",
        "outputs": [],
        "type": "function",
    },
    {
        "anonymous": True,
        "inputs": [],
        "name": "LogAnonymous",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [],
        "name": "LogNoArguments",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [{"indexed": False, "name": "arg0", "type": "uint256"}],
        "name": "LogSingleArg",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": False, "name": "arg1", "type": "uint256"},
        ],
        "name": "LogDoubleArg",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": False, "name": "arg1", "type": "uint256"},
            {"indexed": False, "name": "arg2", "type": "uint256"},
        ],
        "name": "LogTripleArg",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": False, "name": "arg1", "type": "uint256"},
            {"indexed": False, "name": "arg2", "type": "uint256"},
            {"indexed": False, "name": "arg3", "type": "uint256"},
        ],
        "name": "LogQuadrupleArg",
        "type": "event",
    },
    {
        "anonymous": True,
        "inputs": [{"indexed": True, "name": "arg0", "type": "uint256"}],
        "name": "LogSingleAnonymous",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [{"indexed": True, "name": "arg0", "type": "uint256"}],
        "name": "LogSingleWithIndex",
        "type": "event",
    },
    {
        "anonymous": True,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": True, "name": "arg1", "type": "uint256"},
        ],
        "name": "LogDoubleAnonymous",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": True, "name": "arg1", "type": "uint256"},
        ],
        "name": "LogDoubleWithIndex",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": True, "name": "arg1", "type": "uint256"},
            {"indexed": True, "name": "arg2", "type": "uint256"},
        ],
        "name": "LogTripleWithIndex",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "arg0", "type": "uint256"},
            {"indexed": False, "name": "arg1", "type": "uint256"},
            {"indexed": True, "name": "arg2", "type": "uint256"},
            {"indexed": True, "name": "arg3", "type": "uint256"},
        ],
        "name": "LogQuadrupleWithIndex",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [{"indexed": False, "name": "v", "type": "bytes"}],
        "name": "LogBytes",
        "type": "event",
    },
    {
        "anonymous": False,
        "inputs": [{"indexed": False, "name": "v", "type": "string"}],
        "name": "LogString",
        "type": "event",
    },
]
# Maps Emitter event names to the uint8 `which` selector passed to the
# contract's log* functions.
EMITTER_ENUM = {
    'LogAnonymous': 0,
    'LogNoArguments': 1,
    'LogSingleArg': 2,
    'LogDoubleArg': 3,
    'LogTripleArg': 4,
    'LogQuadrupleArg': 5,
    'LogSingleAnonymous': 6,
    'LogSingleWithIndex': 7,
    'LogDoubleAnonymous': 8,
    'LogDoubleWithIndex': 9,
    'LogTripleWithIndex': 10,
    # NOTE(review): key is truncated relative to the ABI event name
    # 'LogQuadrupleWithIndex' — confirm whether callers depend on this
    # spelling before correcting it.
    'LogQuadrupleWithInde': 11,
}
# Compiled EVM bytecode for the Math test contract (deploy payload); its
# interface is described by MATH_ABI below.
MATH_BYTECODE = (
    "606060405261022e806100126000396000f360606040523615610074576000357c01000000000000"
    "000000000000000000000000000000000000000000009004806316216f391461007657806361bc22"
    "1a146100995780637cf5dab0146100bc578063a5f3c23b146100e8578063d09de08a1461011d5780"
    "63dcf537b11461014057610074565b005b610083600480505061016c565b60405180828152602001"
    "91505060405180910390f35b6100a6600480505061017f565b604051808281526020019150506040"
    "5180910390f35b6100d26004808035906020019091905050610188565b6040518082815260200191"
    "505060405180910390f35b61010760048080359060200190919080359060200190919050506101ea"
    "565b6040518082815260200191505060405180910390f35b61012a6004805050610201565b604051"
    "8082815260200191505060405180910390f35b610156600480803590602001909190505061021756"
    "5b6040518082815260200191505060405180910390f35b6000600d9050805080905061017c565b90"
    "565b60006000505481565b6000816000600082828250540192505081905550600060005054905080"
    "507f3496c3ede4ec3ab3686712aa1c238593ea6a42df83f98a5ec7df9834cfa577c5816040518082"
    "815260200191505060405180910390a18090506101e5565b919050565b6000818301905080508090"
    "506101fb565b92915050565b600061020d6001610188565b9050610214565b90565b600060078202"
    "90508050809050610229565b91905056"
)
# ABI for the Math test contract: simple arithmetic helpers plus a `counter`
# accumulator and the `Increased` event fired by `increment`.
MATH_ABI = [
    {
        "constant": False,
        "inputs": [],
        "name": "return13",
        "outputs": [
            {"name": "result", "type": "int256"},
        ],
        "type": "function",
    },
    {
        "constant": True,
        "inputs": [],
        "name": "counter",
        "outputs": [
            {"name": "", "type": "uint256"},
        ],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "amt", "type": "uint256"},
        ],
        "name": "increment",
        "outputs": [
            {"name": "result", "type": "uint256"},
        ],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [
            {"name": "a", "type": "int256"},
            {"name": "b", "type": "int256"},
        ],
        "name": "add",
        "outputs": [
            {"name": "result", "type": "int256"},
        ],
        "type": "function",
    },
    {
        "constant": False,
        "inputs": [],
        "name": "increment",
        "outputs": [
            {"name": "", "type": "uint256"},
        ],
        "type": "function"
    },
    {
        "constant": False,
        "inputs": [
            {"name": "a", "type": "int256"},
        ],
        "name": "multiply7",
        "outputs": [
            {"name": "result", "type": "int256"},
        ],
        "type": "function",
    },
    {
        "anonymous": False,
        "inputs": [
            {"indexed": False, "name": "value", "type": "uint256"},
        ],
        "name": "Increased",
        "type": "event",
    },
]
import pytest
from eth_utils import (
add_0x_prefix,
)
from web3._utils.formatters import (
hex_to_integer,
)
class ParityModuleTest:
    """Integration tests for the Parity-specific trace/storage RPC endpoints."""

    def test_list_storage_keys_no_support(self, web3, emitter_contract_address):
        """listStorageKeys returns None when the node lacks support."""
        keys = web3.parity.listStorageKeys(emitter_contract_address, 10, None)
        assert keys is None

    def test_trace_replay_transaction(self, web3, parity_fixture_data):
        """traceReplayTransaction yields a trace whose sender is the coinbase."""
        trace = web3.parity.traceReplayTransaction(parity_fixture_data['mined_txn_hash'])

        assert trace['stateDiff'] is None
        assert trace['vmTrace'] is None
        assert trace['trace'][0]['action']['from'] == add_0x_prefix(parity_fixture_data['coinbase'])

    def test_trace_replay_block_with_transactions(self,
                                                  web3,
                                                  block_with_txn,
                                                  parity_fixture_data):
        """traceReplayBlockTransactions traces every txn in a non-empty block."""
        trace = web3.parity.traceReplayBlockTransactions(block_with_txn['number'])
        assert len(trace) > 0
        trace_0_action = trace[0]['trace'][0]['action']
        assert trace_0_action['from'] == add_0x_prefix(parity_fixture_data['coinbase'])

    def test_trace_replay_block_without_transactions(self, web3, empty_block):
        """An empty block replays to an empty trace list."""
        trace = web3.parity.traceReplayBlockTransactions(empty_block['number'])
        assert len(trace) == 0

    def test_trace_block(self, web3, block_with_txn):
        """traceBlock tags each trace entry with its block number."""
        trace = web3.parity.traceBlock(block_with_txn['number'])
        assert trace[0]['blockNumber'] == block_with_txn['number']

    def test_trace_transaction(self, web3, parity_fixture_data):
        """traceTransaction reports the coinbase as the txn sender."""
        trace = web3.parity.traceTransaction(parity_fixture_data['mined_txn_hash'])
        assert trace[0]['action']['from'] == add_0x_prefix(parity_fixture_data['coinbase'])

    def test_trace_call(self, web3, math_contract, math_contract_address):
        """traceCall executes a contract call and exposes its raw output."""
        coinbase = web3.eth.coinbase
        txn_params = math_contract._prepare_transaction(
            fn_name='add',
            fn_args=(7, 11),
            transaction={'from': coinbase, 'to': math_contract_address},
        )
        trace = web3.parity.traceCall(txn_params)
        assert trace['stateDiff'] is None
        assert trace['vmTrace'] is None
        result = hex_to_integer(trace['output'])
        assert result == 18

    def test_eth_call_with_0_result(self, web3, math_contract, math_contract_address):
        """traceCall correctly surfaces a zero return value."""
        coinbase = web3.eth.coinbase
        txn_params = math_contract._prepare_transaction(
            fn_name='add',
            fn_args=(0, 0),
            transaction={'from': coinbase, 'to': math_contract_address},
        )
        trace = web3.parity.traceCall(txn_params)
        assert trace['stateDiff'] is None
        assert trace['vmTrace'] is None
        result = hex_to_integer(trace['output'])
        assert result == 0

    @pytest.mark.parametrize(
        'raw_transaction',
        [
            (
                # address 0x39EEed73fb1D3855E90Cbd42f348b3D7b340aAA6
                '0xf8648085174876e8008252089439eeed73fb1d3855e90cbd42f348b3d7b340aaa601801ba0ec1295f00936acd0c2cb90ab2cdaacb8bf5e11b3d9957833595aca9ceedb7aada05dfc8937baec0e26029057abd3a1ef8c505dca2cdc07ffacb046d090d2bea06a'  # noqa: E501
            ),
        ]
    )
    def test_trace_raw_transaction(self,
                                   web3,
                                   raw_transaction,
                                   funded_account_for_raw_txn):
        """traceRawTransaction traces a signed payload without broadcasting it."""
        trace = web3.parity.traceRawTransaction(raw_transaction)
        assert trace['stateDiff'] is None
        assert trace['vmTrace'] is None
        assert trace['trace'][0]['action']['from'] == funded_account_for_raw_txn.lower()

    def test_trace_filter(self, web3, txn_filter_params, parity_fixture_data):
        """traceFilter returns a list of traces matching the filter params."""
        trace = web3.parity.traceFilter(txn_filter_params)
        assert isinstance(trace, list)
        assert trace[0]['action']['from'] == add_0x_prefix(parity_fixture_data['coinbase'])
from eth_utils import (
is_checksum_address,
is_list_like,
is_same_address,
)
# Fixture key material for the personal-module tests below.
# Raw private key + password used by test_personal_importRawKey, and the
# checksum address that key resolves to.
PRIVATE_KEY_HEX = '0x56ebb41875ceedd42e395f730e03b5c44989393c9f0484ee6bc05f933673458f'
PASSWORD = 'web3-testing'
ADDRESS = '0x844B417c0C58B02c2224306047B9fb0D3264fE8c'

# Key/address pair used by the lock/unlock tests.
PRIVATE_KEY_FOR_UNLOCK = '0x392f63a79b1ff8774845f3fa69de4a13800a59e7083f5187f1558f0797ad0f01'
ACCOUNT_FOR_UNLOCK = '0x12efDc31B1a8FA1A1e756DFD8A1601055C971E13'
class PersonalModuleTest:
    """Integration tests for the `personal` (account-management) RPC namespace."""

    def test_personal_importRawKey(self, web3):
        """Importing a raw private key yields its known checksum address."""
        actual = web3.personal.importRawKey(PRIVATE_KEY_HEX, PASSWORD)
        assert actual == ADDRESS

    def test_personal_listAccounts(self, web3):
        """listAccounts returns a non-empty list of checksum addresses."""
        accounts = web3.personal.listAccounts
        assert is_list_like(accounts)
        assert len(accounts) > 0
        assert all((
            is_checksum_address(item)
            for item
            in accounts
        ))

    def test_personal_lockAccount(self, web3, unlockable_account_dual_type):
        """lockAccount completes without error on an unlockable account."""
        # TODO: how do we test this better?
        web3.personal.lockAccount(unlockable_account_dual_type)

    def test_personal_unlockAccount_success(self,
                                            web3,
                                            unlockable_account_dual_type,
                                            unlockable_account_pw):
        """Unlocking with the correct password succeeds."""
        result = web3.personal.unlockAccount(unlockable_account_dual_type, unlockable_account_pw)
        assert result is True

    def test_personal_unlockAccount_failure(self,
                                            web3,
                                            unlockable_account_dual_type):
        """Unlocking with a wrong password fails (returns False, not raises)."""
        result = web3.personal.unlockAccount(unlockable_account_dual_type, 'bad-password')
        assert result is False

    def test_personal_newAccount(self, web3):
        """newAccount creates an account and returns its checksum address."""
        new_account = web3.personal.newAccount(PASSWORD)
        assert is_checksum_address(new_account)

    def test_personal_sendTransaction(self,
                                      web3,
                                      unlockable_account_dual_type,
                                      unlockable_account_pw):
        """personal_sendTransaction signs with the password and broadcasts."""
        assert web3.eth.getBalance(unlockable_account_dual_type) > web3.toWei(1, 'ether')
        txn_params = {
            'from': unlockable_account_dual_type,
            'to': unlockable_account_dual_type,
            'gas': 21000,
            'value': 1,
            'gasPrice': web3.toWei(1, 'gwei'),
        }
        txn_hash = web3.personal.sendTransaction(txn_params, unlockable_account_pw)
        assert txn_hash
        transaction = web3.eth.getTransaction(txn_hash)

        # The broadcast transaction must mirror the submitted parameters.
        assert is_same_address(transaction['from'], txn_params['from'])
        assert is_same_address(transaction['to'], txn_params['to'])
        assert transaction['gas'] == txn_params['gas']
        assert transaction['value'] == txn_params['value']
        assert transaction['gasPrice'] == txn_params['gasPrice']

    def test_personal_sign_and_ecrecover(self,
                                         web3,
                                         unlockable_account_dual_type,
                                         unlockable_account_pw):
        """ecRecover on a personal_sign signature recovers the signing account."""
        message = 'test-web3-personal-sign'
        signature = web3.personal.sign(message, unlockable_account_dual_type, unlockable_account_pw)
        signer = web3.personal.ecRecover(message, signature)
        assert is_same_address(signer, unlockable_account_dual_type)
import pytest
from hexbytes import (
HexBytes,
)
from web3 import Web3
from web3._utils.ens import (
ens_addresses,
)
from web3.exceptions import (
InvalidAddress,
)
class Web3ModuleTest:
    def test_web3_clientVersion(self, web3):
        """Fetch the node's client version string and defer validation to the
        client-specific subclass hook."""
        client_version = web3.version.node
        self._check_web3_clientVersion(client_version)
    def _check_web3_clientVersion(self, client_version):
        """Subclass hook: validate the client version string for a specific node."""
        raise NotImplementedError("Must be implemented by subclasses")
# Contract that calculated test values can be found at
# https://kovan.etherscan.io/address/0xb9be06f5b99372cf9afbccadbbb9954ccaf7f4bb#code
@pytest.mark.parametrize(
'types,values,expected',
(
(
['bool'],
[True],
HexBytes("0x5fe7f977e71dba2ea1a68e21057beebb9be2ac30c6410aa38d4f3fbe41dcffd2"),
),
(
['uint8', 'uint8', 'uint8'],
[97, 98, 99],
HexBytes("0x4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45"),
),
(
['uint248'],
[30],
HexBytes("0x30f95d210785601eb33ae4d53d405b26f920e765dff87cca8e9a4aec99f82671"),
),
(
['bool', 'uint16'],
[True, 299],
HexBytes("0xed18599ccd80ee9fae9a28b0e34a5573c3233d7468f808fd659bc171cf0b43bd"),
),
(
['int256'],
[-10],
HexBytes("0xd6fb717f7e270a360f5093ce6a7a3752183e89c9a9afe5c0cb54b458a304d3d5"),
),
(
['int256'],
[10],
HexBytes("0xc65a7bb8d6351c1cf70c95a316cc6a92839c986682d98bc35f958f4883f9d2a8"),
),
(
['int8', 'uint8'],
[-10, 18],
HexBytes("0x5c6ab1e634c08d9c0f4df4d789e8727943ef010dd7ca8e3c89de197a26d148be"),
),
(
['address'],
["0x49eddd3769c0712032808d86597b84ac5c2f5614"],
InvalidAddress,
),
(
['address'],
["0x49EdDD3769c0712032808D86597B84ac5c2F5614"],
HexBytes("0x2ff37b5607484cd4eecf6d13292e22bd6e5401eaffcc07e279583bc742c68882"),
),
(
['bytes2'],
['0x5402'],
HexBytes("0x4ed9171bda52fca71ab28e7f452bd6eacc3e5a568a47e0fa53b503159a9b8910"),
),
(
['bytes3'],
['0x5402'],
HexBytes("0x4ed9171bda52fca71ab28e7f452bd6eacc3e5a568a47e0fa53b503159a9b8910"),
),
(
['bytes'],
[
'0x636865636b6c6f6e6762797465737472696e676167'
'61696e7374736f6c6964697479736861336861736866756e6374696f6e'
],
HexBytes("0xd78a84d65721b67e4011b10c99dafdedcdcd7cb30153064f773e210b4762e22f"),
),
(
['string'],
['testing a string!'],
HexBytes("0xe8c275c0b4070a5ec6cfcb83f0ba394b30ddd283de785d43f2eabfb04bd96747"),
),
(
['string', 'bool', 'uint16', 'bytes2', 'address'],
[
'testing a string!',
False,
299,
'0x5402',
"0x49eddd3769c0712032808d86597b84ac5c2f5614",
],
InvalidAddress,
),
(
['string', 'bool', 'uint16', 'bytes2', 'address'],
[
'testing a string!',
False,
299,
'0x5402',
"0x49EdDD3769c0712032808D86597B84ac5c2F5614",
],
HexBytes("0x8cc6eabb25b842715e8ca39e2524ed946759aa37bfb7d4b81829cf5a7e266103"),
),
(
['bool[2][]'],
[[[True, False], [False, True]]],
HexBytes("0x1eef261f2eb51a8c736d52be3f91ff79e78a9ec5df2b7f50d0c6f98ed1e2bc06"),
),
(
['bool[]'],
[[True, False, True]],
HexBytes("0x5c6090c0461491a2941743bda5c3658bf1ea53bbd3edcde54e16205e18b45792"),
),
(
['uint24[]'],
[[1, 0, 1]],
HexBytes("0x5c6090c0461491a2941743bda5c3658bf1ea53bbd3edcde54e16205e18b45792"),
),
(
['uint8[2]'],
[[8, 9]],
HexBytes("0xc7694af312c4f286114180fd0ba6a52461fcee8a381636770b19a343af92538a"),
),
(
['uint256[2]'],
[[8, 9]],
HexBytes("0xc7694af312c4f286114180fd0ba6a52461fcee8a381636770b19a343af92538a"),
),
(
['uint8[]'],
[[8]],
HexBytes("0xf3f7a9fe364faab93b216da50a3214154f22a0a2b415b23a84c8169e8b636ee3"),
),
(
['address[]'],
[[
"0x49EdDD3769c0712032808D86597B84ac5c2F5614",
"0xA6b759bBbf4B59D24acf7E06e79f3a5D104fdCE5",
]],
HexBytes("0xb98565c0c26a962fd54d93b0ed6fb9296e03e9da29d2281ed3e3473109ef7dde"),
),
(
['address[]'],
[[
"0x49EdDD3769c0712032808D86597B84ac5c2F5614",
"0xa6b759bbbf4b59d24acf7e06e79f3a5d104fdce5",
]],
InvalidAddress,
),
),
)
def test_solidityKeccak(self, web3, types, values, expected):
if isinstance(expected, type) and issubclass(expected, Exception):
with pytest.raises(expected):
web3.solidityKeccak(types, values)
return
actual = web3.solidityKeccak(types, values)
assert actual == expected
@pytest.mark.parametrize(
'types, values, expected',
(
(
['address'],
['one.eth'],
HexBytes("0x2ff37b5607484cd4eecf6d13292e22bd6e5401eaffcc07e279583bc742c68882"),
),
(
['address[]'],
[['one.eth', 'two.eth']],
HexBytes("0xb98565c0c26a962fd54d93b0ed6fb9296e03e9da29d2281ed3e3473109ef7dde"),
),
),
)
def test_solidityKeccak_ens(self, web3, types, values, expected):
with ens_addresses(web3, {
'one.eth': "0x49EdDD3769c0712032808D86597B84ac5c2F5614",
'two.eth': "0xA6b759bBbf4B59D24acf7E06e79f3a5D104fdCE5",
}):
# when called as class method, any name lookup attempt will fail
with pytest.raises(InvalidAddress):
Web3.solidityKeccak(types, values)
# when called as instance method, ens lookups can succeed
actual = web3.solidityKeccak(types, values)
assert actual == expected
@pytest.mark.parametrize(
'types,values',
(
(['address'], ['0xA6b759bBbf4B59D24acf7E06e79f3a5D104fdCE5', True]),
(['address', 'bool'], ['0xA6b759bBbf4B59D24acf7E06e79f3a5D104fdCE5']),
([], ['0xA6b759bBbf4B59D24acf7E06e79f3a5D104fdCE5']),
)
)
def test_solidityKeccak_same_number_of_types_and_values(self, web3, types, values):
with pytest.raises(ValueError):
web3.solidityKeccak(types, values)
def test_is_connected(self, web3):
assert web3.isConnected() | 0x-web3 | /0x-web3-5.0.0a5.tar.gz/0x-web3-5.0.0a5/web3/_utils/module_testing/web3_module.py | web3_module.py |
import collections
import math
import operator
from eth_utils import (
to_tuple,
)
from web3._utils.math import (
percentile,
)
from web3._utils.toolz import (
curry,
groupby,
sliding_window,
)
from web3.exceptions import (
InsufficientData,
ValidationError,
)
# Per-miner aggregate statistics derived from recently mined blocks.
MinerData = collections.namedtuple(
    'MinerData',
    ['miner', 'num_blocks', 'min_gas_price', 'low_percentile_gas_price'])
# Pairing of a gas price with the estimated probability a txn at that
# price is accepted within the wait window.
Probability = collections.namedtuple('Probability', ['gas_price', 'prob'])
def _get_avg_block_time(w3, sample_size):
latest = w3.eth.getBlock('latest')
constrained_sample_size = min(sample_size, latest['number'])
if constrained_sample_size == 0:
raise ValidationError('Constrained sample size is 0')
oldest = w3.eth.getBlock(latest['number'] - constrained_sample_size)
return (latest['timestamp'] - oldest['timestamp']) / constrained_sample_size
def _get_raw_miner_data(w3, sample_size):
latest = w3.eth.getBlock('latest', full_transactions=True)
for transaction in latest['transactions']:
yield (latest['miner'], latest['hash'], transaction['gasPrice'])
block = latest
for _ in range(sample_size - 1):
if block['number'] == 0:
break
# we intentionally trace backwards using parent hashes rather than
# block numbers to make caching the data easier to implement.
block = w3.eth.getBlock(block['parentHash'], full_transactions=True)
for transaction in block['transactions']:
yield (block['miner'], block['hash'], transaction['gasPrice'])
def _aggregate_miner_data(raw_data):
    """Collapse raw ``(miner, block_hash, gas_price)`` rows into per-miner ``MinerData``."""
    for miner, rows in groupby(0, raw_data).items():
        _, block_hashes, gas_prices = map(set, zip(*rows))
        try:
            low_price = percentile(gas_prices, percentile=20)
        except InsufficientData:
            # too few distinct samples for a percentile; fall back to the cheapest txn
            low_price = min(gas_prices)
        yield MinerData(
            miner,
            len(set(block_hashes)),
            min(gas_prices),
            low_price)
@to_tuple
def _compute_probabilities(miner_data, wait_blocks, sample_size):
    """
    Computes the probabilities that a txn will be accepted at each of the gas
    prices accepted by the miners.
    """
    by_price_desc = sorted(
        miner_data,
        key=operator.attrgetter('low_percentile_gas_price'),
        reverse=True,
    )
    for idx, datum in enumerate(by_price_desc):
        # every miner from idx onward accepts this price or less
        num_blocks_accepting_price = sum(m.num_blocks for m in by_price_desc[idx:])
        miss_chance_per_block = (sample_size - num_blocks_accepting_price) / sample_size
        acceptance_chance = 1 - miss_chance_per_block ** wait_blocks
        yield Probability(datum.low_percentile_gas_price, acceptance_chance)
def _compute_gas_price(probabilities, desired_probability):
"""
Given a sorted range of ``Probability`` named-tuples returns a gas price
computed based on where the ``desired_probability`` would fall within the
range.
:param probabilities: An iterable of `Probability` named-tuples sorted in reverse order.
:param desired_probability: An floating point representation of the desired
probability. (e.g. ``85% -> 0.85``)
"""
first = probabilities[0]
last = probabilities[-1]
if desired_probability >= first.prob:
return first.gas_price
elif desired_probability <= last.prob:
return last.gas_price
for left, right in sliding_window(2, probabilities):
if desired_probability < right.prob:
continue
elif desired_probability > left.prob:
# This code block should never be reachable as it would indicate
# that we already passed by the probability window in which our
# `desired_probability` is located.
raise Exception('Invariant')
adj_prob = desired_probability - right.prob
window_size = left.prob - right.prob
position = adj_prob / window_size
gas_window_size = left.gas_price - right.gas_price
gas_price = int(math.ceil(right.gas_price + gas_window_size * position))
return gas_price
else:
# The initial `if/else` clause in this function handles the case where
# the `desired_probability` is either above or below the min/max
# probability found in the `probabilities`.
#
# With these two cases handled, the only way this code block should be
# reachable would be if the `probabilities` were not sorted correctly.
# Otherwise, the `desired_probability` **must** fall between two of the
# values in the `probabilities``.
raise Exception('Invariant')
@curry
def construct_time_based_gas_price_strategy(max_wait_seconds,
                                            sample_size=120,
                                            probability=98):
    """
    A gas pricing strategy that uses recently mined block data to derive a gas
    price for which a transaction is likely to be mined within X seconds with
    probability P.

    :param max_wait_seconds: The desired maxiumum number of seconds the
        transaction should take to mine.
    :param sample_size: The number of recent blocks to sample
    :param probability: An integer representation of the desired probability
        that the transaction will be mined within ``max_wait_seconds``. 0 means 0%
        and 100 means 100%.
    """
    def time_based_gas_price_strategy(web3, transaction_params):
        # translate the desired wait time into a number of blocks
        block_time = _get_avg_block_time(web3, sample_size=sample_size)
        wait_blocks = int(math.ceil(max_wait_seconds / block_time))

        miner_data = _aggregate_miner_data(
            _get_raw_miner_data(web3, sample_size=sample_size),
        )
        probabilities = _compute_probabilities(
            miner_data,
            wait_blocks=wait_blocks,
            sample_size=sample_size,
        )
        return _compute_gas_price(probabilities, probability / 100)
    return time_based_gas_price_strategy
# Prebuilt strategies for common wait-time targets.
# fast: mine within 1 minute
fast_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=60,
    sample_size=120,
)
# medium: mine within 10 minutes
medium_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=600,
    sample_size=120,
)
# slow: mine within 1 hour (60 minutes)
slow_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=60 * 60,
    sample_size=120,
)
# glacial: mine within the next 24 hours.
glacial_gas_price_strategy = construct_time_based_gas_price_strategy(
    max_wait_seconds=24 * 60 * 60,
    sample_size=720,
)
def construct_fixture_middleware(fixtures):
    """
    Constructs a middleware which short-circuits any RPC method found in
    ``fixtures`` with its canned value; all other methods pass through.
    """
    def fixture_middleware(make_request, web3):
        def middleware(method, params):
            if method not in fixtures:
                return make_request(method, params)
            return {'result': fixtures[method]}
        return middleware
    return fixture_middleware
def construct_result_generator_middleware(result_generators):
    """
    Constructs a middleware which intercepts requests for any method found in
    the provided mapping of endpoints to generator functions, returning
    whatever response the generator function returns. Callbacks must be
    functions with the signature `fn(method, params)`.
    """
    def result_generator_middleware(make_request, web3):
        def middleware(method, params):
            if method not in result_generators:
                return make_request(method, params)
            return {'result': result_generators[method](method, params)}
        return middleware
    return result_generator_middleware
def construct_error_generator_middleware(error_generators):
    """
    Constructs a middleware which intercepts requests for any method found in
    the provided mapping of endpoints to generator functions, returning
    whatever error message the generator function returns. Callbacks must be
    functions with the signature `fn(method, params)`.
    """
    def error_generator_middleware(make_request, web3):
        def middleware(method, params):
            if method not in error_generators:
                return make_request(method, params)
            return {'error': error_generators[method](method, params)}
        return middleware
    return error_generator_middleware
import codecs
import operator
from eth_utils.curried import (
combine_argument_formatters,
is_address,
is_bytes,
is_integer,
is_null,
is_string,
remove_0x_prefix,
text_if_str,
to_checksum_address,
)
from hexbytes import (
HexBytes,
)
from web3._utils.abi import (
is_length,
)
from web3._utils.encoding import (
hexstr_if_str,
to_hex,
)
from web3._utils.formatters import (
apply_formatter_at_index,
apply_formatter_if,
apply_formatter_to_array,
apply_formatters_to_dict,
apply_one_of_formatters,
hex_to_integer,
integer_to_hex,
is_array_of_dicts,
is_array_of_strings,
remove_key_if,
)
from web3._utils.toolz import (
complement,
compose,
curry,
partial,
)
from web3._utils.toolz.curried import (
keymap,
valmap,
)
from .formatting import (
construct_formatting_middleware,
)
def bytes_to_ascii(value):
    """Decode an ASCII byte string into ``str``."""
    return codecs.decode(value, 'ascii')
# Conditional formatters reused by the middleware tables below.
to_ascii_if_bytes = apply_formatter_if(is_bytes, bytes_to_ascii)
to_integer_if_hex = apply_formatter_if(is_string, hex_to_integer)
# outbound block numbers go over the wire as hex quantities
block_number_formatter = apply_formatter_if(is_integer, integer_to_hex)
# predicates used by the response formatters (e.g. eth_syncing returns False)
is_false = partial(operator.is_, False)
is_not_false = complement(is_false)
is_not_null = complement(is_null)
@curry
def to_hexbytes(num_bytes, val, variable_length=False):
    """
    Coerce ``val`` into a ``HexBytes`` value of exactly ``num_bytes`` (or at
    most ``num_bytes`` when ``variable_length`` is true), stripping any
    zero-byte left padding; raise when the value cannot fit.
    """
    if not isinstance(val, (str, int, bytes)):
        raise TypeError("Cannot convert %r to HexBytes" % val)
    result = HexBytes(val)

    extra_bytes = len(result) - num_bytes
    if extra_bytes == 0 or (variable_length and extra_bytes < 0):
        return result
    if all(byte == 0 for byte in result[:extra_bytes]):
        # drop zero left-padding down to the expected width
        return HexBytes(result[extra_bytes:])
    raise ValueError(
        "The value %r is %d bytes, but should be %d" % (
            result, len(result), num_bytes
        )
    )
# Formatters applied to transaction objects returned by the client:
# hex quantities -> ints, hashes/raw data -> HexBytes, addresses -> checksummed.
TRANSACTION_FORMATTERS = {
    'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex),
    'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex),
    'nonce': to_integer_if_hex,
    'gas': to_integer_if_hex,
    'gasPrice': to_integer_if_hex,
    'value': to_integer_if_hex,
    'from': to_checksum_address,
    'publicKey': apply_formatter_if(is_not_null, to_hexbytes(64)),
    # signature components may be shorter than 32 bytes, hence variable_length
    'r': to_hexbytes(32, variable_length=True),
    'raw': HexBytes,
    's': to_hexbytes(32, variable_length=True),
    'to': apply_formatter_if(is_address, to_checksum_address),
    'hash': to_hexbytes(32),
    'v': apply_formatter_if(is_not_null, to_integer_if_hex),
    'standardV': apply_formatter_if(is_not_null, to_integer_if_hex),
}
transaction_formatter = apply_formatters_to_dict(TRANSACTION_FORMATTERS)
# Formatters for whisper (shh) log entries.
WHISPER_LOG_FORMATTERS = {
    'sig': to_hexbytes(130),
    'topic': to_hexbytes(8),
    'payload': HexBytes,
    'padding': apply_formatter_if(is_not_null, HexBytes),
    'hash': to_hexbytes(64),
    'recipientPublicKey': apply_formatter_if(is_not_null, to_hexbytes(130)),
}
whisper_log_formatter = apply_formatters_to_dict(WHISPER_LOG_FORMATTERS)
# Formatters for individual event log entries.
LOG_ENTRY_FORMATTERS = {
    'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex),
    'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex),
    'transactionHash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'logIndex': to_integer_if_hex,
    'address': to_checksum_address,
    'topics': apply_formatter_to_array(to_hexbytes(32)),
    'data': to_ascii_if_bytes,
}
log_entry_formatter = apply_formatters_to_dict(LOG_ENTRY_FORMATTERS)
# Formatters for transaction receipts (including their nested logs).
RECEIPT_FORMATTERS = {
    'blockHash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'blockNumber': apply_formatter_if(is_not_null, to_integer_if_hex),
    'transactionIndex': apply_formatter_if(is_not_null, to_integer_if_hex),
    'transactionHash': to_hexbytes(32),
    'cumulativeGasUsed': to_integer_if_hex,
    'status': to_integer_if_hex,
    'gasUsed': to_integer_if_hex,
    'contractAddress': apply_formatter_if(is_not_null, to_checksum_address),
    'logs': apply_formatter_to_array(log_entry_formatter),
    'logsBloom': to_hexbytes(256),
}
receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS)
# Formatters for block objects; `transactions` may be full objects or hashes.
BLOCK_FORMATTERS = {
    'extraData': to_hexbytes(32, variable_length=True),
    'gasLimit': to_integer_if_hex,
    'gasUsed': to_integer_if_hex,
    'size': to_integer_if_hex,
    'timestamp': to_integer_if_hex,
    'hash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'logsBloom': to_hexbytes(256),
    'miner': apply_formatter_if(is_not_null, to_checksum_address),
    'mixHash': to_hexbytes(32),
    'nonce': apply_formatter_if(is_not_null, to_hexbytes(8, variable_length=True)),
    'number': apply_formatter_if(is_not_null, to_integer_if_hex),
    'parentHash': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'sha3Uncles': apply_formatter_if(is_not_null, to_hexbytes(32)),
    'uncles': apply_formatter_to_array(to_hexbytes(32)),
    'difficulty': to_integer_if_hex,
    'receiptsRoot': to_hexbytes(32),
    'stateRoot': to_hexbytes(32),
    'totalDifficulty': to_integer_if_hex,
    'transactions': apply_one_of_formatters((
        (apply_formatter_to_array(transaction_formatter), is_array_of_dicts),
        (apply_formatter_to_array(to_hexbytes(32)), is_array_of_strings),
    )),
    'transactionsRoot': to_hexbytes(32),
}
block_formatter = apply_formatters_to_dict(BLOCK_FORMATTERS)
# Formatters for the object returned by eth_syncing (when not False).
SYNCING_FORMATTERS = {
    'startingBlock': to_integer_if_hex,
    'currentBlock': to_integer_if_hex,
    'highestBlock': to_integer_if_hex,
    'knownStates': to_integer_if_hex,
    'pulledStates': to_integer_if_hex,
}
syncing_formatter = apply_formatters_to_dict(SYNCING_FORMATTERS)
# Formatters for txpool_content / txpool_inspect responses.
TRANSACTION_POOL_CONTENT_FORMATTERS = {
    'pending': compose(
        keymap(to_ascii_if_bytes),
        valmap(transaction_formatter),
    ),
    'queued': compose(
        keymap(to_ascii_if_bytes),
        valmap(transaction_formatter),
    ),
}
transaction_pool_content_formatter = apply_formatters_to_dict(
    TRANSACTION_POOL_CONTENT_FORMATTERS
)
TRANSACTION_POOL_INSPECT_FORMATTERS = {
    'pending': keymap(to_ascii_if_bytes),
    'queued': keymap(to_ascii_if_bytes),
}
transaction_pool_inspect_formatter = apply_formatters_to_dict(
    TRANSACTION_POOL_INSPECT_FORMATTERS
)
# Outbound filter parameters: block bounds go over the wire as hex quantities.
FILTER_PARAMS_FORMATTERS = {
    'fromBlock': apply_formatter_if(is_integer, integer_to_hex),
    'toBlock': apply_formatter_if(is_integer, integer_to_hex),
}
filter_params_formatter = apply_formatters_to_dict(FILTER_PARAMS_FORMATTERS)
# Filter results are either log-entry dicts or plain hashes.
filter_result_formatter = apply_one_of_formatters((
    (apply_formatter_to_array(log_entry_formatter), is_array_of_dicts),
    (apply_formatter_to_array(to_hexbytes(32)), is_array_of_strings),
))
# Outbound transaction parameters; an empty/None `to` field is dropped.
TRANSACTION_PARAM_FORMATTERS = {
    'chainId': apply_formatter_if(is_integer, str),
}
transaction_param_formatter = compose(
    remove_key_if('to', lambda txn: txn['to'] in {'', b'', None}),
    apply_formatters_to_dict(TRANSACTION_PARAM_FORMATTERS),
)
# eth_estimateGas may be called with or without an optional block identifier.
estimate_gas_without_block_id = apply_formatter_at_index(transaction_param_formatter, 0)
estimate_gas_with_block_id = combine_argument_formatters(
    transaction_param_formatter,
    block_number_formatter,
)
# Middleware translating between idiomatic Python values and the raw JSON-RPC
# wire format: request_formatters run on outbound params, result_formatters on
# inbound results, keyed by RPC method name.
pythonic_middleware = construct_formatting_middleware(
    request_formatters={
        # Eth
        'eth_getBalance': apply_formatter_at_index(block_number_formatter, 1),
        'eth_getBlockByNumber': apply_formatter_at_index(block_number_formatter, 0),
        'eth_getBlockTransactionCountByNumber': apply_formatter_at_index(
            block_number_formatter,
            0,
        ),
        'eth_getCode': apply_formatter_at_index(block_number_formatter, 1),
        'eth_getStorageAt': apply_formatter_at_index(block_number_formatter, 2),
        'eth_getTransactionByBlockNumberAndIndex': compose(
            apply_formatter_at_index(block_number_formatter, 0),
            apply_formatter_at_index(integer_to_hex, 1),
        ),
        'eth_getTransactionCount': apply_formatter_at_index(block_number_formatter, 1),
        'eth_getUncleCountByBlockNumber': apply_formatter_at_index(block_number_formatter, 0),
        'eth_getUncleByBlockNumberAndIndex': compose(
            apply_formatter_at_index(block_number_formatter, 0),
            apply_formatter_at_index(integer_to_hex, 1),
        ),
        'eth_getUncleByBlockHashAndIndex': apply_formatter_at_index(integer_to_hex, 1),
        'eth_newFilter': apply_formatter_at_index(filter_params_formatter, 0),
        'eth_getLogs': apply_formatter_at_index(filter_params_formatter, 0),
        'eth_call': combine_argument_formatters(
            transaction_param_formatter,
            block_number_formatter,
        ),
        'eth_estimateGas': apply_one_of_formatters((
            (estimate_gas_without_block_id, is_length(1)),
            (estimate_gas_with_block_id, is_length(2)),
        )),
        'eth_sendTransaction': apply_formatter_at_index(transaction_param_formatter, 0),
        # personal
        'personal_importRawKey': apply_formatter_at_index(
            compose(remove_0x_prefix, hexstr_if_str(to_hex)),
            0,
        ),
        'personal_sign': apply_formatter_at_index(text_if_str(to_hex), 0),
        'personal_ecRecover': apply_formatter_at_index(text_if_str(to_hex), 0),
        'personal_sendTransaction': apply_formatter_at_index(transaction_param_formatter, 0),
        # Snapshot and Revert
        'evm_revert': apply_formatter_at_index(integer_to_hex, 0),
        'trace_replayBlockTransactions': apply_formatter_at_index(block_number_formatter, 0),
        'trace_block': apply_formatter_at_index(block_number_formatter, 0),
        'trace_call': compose(
            apply_formatter_at_index(transaction_param_formatter, 0),
            apply_formatter_at_index(block_number_formatter, 2)
        ),
    },
    result_formatters={
        # Eth
        'eth_accounts': apply_formatter_to_array(to_checksum_address),
        'eth_blockNumber': to_integer_if_hex,
        'eth_coinbase': to_checksum_address,
        'eth_estimateGas': to_integer_if_hex,
        'eth_gasPrice': to_integer_if_hex,
        'eth_getBalance': to_integer_if_hex,
        'eth_getBlockByHash': apply_formatter_if(is_not_null, block_formatter),
        'eth_getBlockByNumber': apply_formatter_if(is_not_null, block_formatter),
        'eth_getBlockTransactionCountByHash': to_integer_if_hex,
        'eth_getBlockTransactionCountByNumber': to_integer_if_hex,
        'eth_getCode': HexBytes,
        'eth_getFilterChanges': filter_result_formatter,
        'eth_getFilterLogs': filter_result_formatter,
        'eth_getLogs': filter_result_formatter,
        'eth_getStorageAt': HexBytes,
        'eth_getTransactionByBlockHashAndIndex': apply_formatter_if(
            is_not_null,
            transaction_formatter,
        ),
        'eth_getTransactionByBlockNumberAndIndex': apply_formatter_if(
            is_not_null,
            transaction_formatter,
        ),
        'eth_getTransactionByHash': apply_formatter_if(is_not_null, transaction_formatter),
        'eth_getTransactionCount': to_integer_if_hex,
        'eth_getTransactionReceipt': apply_formatter_if(
            is_not_null,
            receipt_formatter,
        ),
        'eth_getUncleCountByBlockHash': to_integer_if_hex,
        'eth_getUncleCountByBlockNumber': to_integer_if_hex,
        'eth_hashrate': to_integer_if_hex,
        'eth_protocolVersion': compose(
            apply_formatter_if(is_integer, str),
            to_integer_if_hex,
        ),
        'eth_sendRawTransaction': to_hexbytes(32),
        'eth_sendTransaction': to_hexbytes(32),
        'eth_sign': HexBytes,
        'eth_syncing': apply_formatter_if(is_not_false, syncing_formatter),
        # personal
        'personal_importRawKey': to_checksum_address,
        'personal_listAccounts': apply_formatter_to_array(to_checksum_address),
        'personal_newAccount': to_checksum_address,
        'personal_sendTransaction': to_hexbytes(32),
        # SHH
        'shh_getFilterMessages': apply_formatter_to_array(whisper_log_formatter),
        # Transaction Pool
        'txpool_content': transaction_pool_content_formatter,
        'txpool_inspect': transaction_pool_inspect_formatter,
        # Snapshot and Revert
        'evm_snapshot': hex_to_integer,
        # Net
        'net_peerCount': to_integer_if_hex,
    },
)
import functools
import threading
import time
import lru
from web3._utils.caching import (
generate_cache_key,
)
# RPC methods whose responses never change once known (keyed by immutable
# inputs such as block hashes) and may therefore be cached permanently.
# Commented-out entries are deliberately excluded from the whitelist.
SIMPLE_CACHE_RPC_WHITELIST = {
    'web3_clientVersion',
    'web3_sha3',
    'net_version',
    # 'net_peerCount',
    # 'net_listening',
    'eth_protocolVersion',
    # 'eth_syncing',
    # 'eth_coinbase',
    # 'eth_mining',
    # 'eth_hashrate',
    # 'eth_gasPrice',
    # 'eth_accounts',
    # 'eth_blockNumber',
    # 'eth_getBalance',
    # 'eth_getStorageAt',
    # 'eth_getTransactionCount',
    'eth_getBlockTransactionCountByHash',
    # 'eth_getBlockTransactionCountByNumber',
    'eth_getUncleCountByBlockHash',
    # 'eth_getUncleCountByBlockNumber',
    # 'eth_getCode',
    # 'eth_sign',
    # 'eth_sendTransaction',
    # 'eth_sendRawTransaction',
    # 'eth_call',
    # 'eth_estimateGas',
    'eth_getBlockByHash',
    # 'eth_getBlockByNumber',
    'eth_getTransactionByHash',
    'eth_getTransactionByBlockHashAndIndex',
    # 'eth_getTransactionByBlockNumberAndIndex',
    # 'eth_getTransactionReceipt',
    'eth_getUncleByBlockHashAndIndex',
    # 'eth_getUncleByBlockNumberAndIndex',
    # 'eth_getCompilers',
    # 'eth_compileLLL',
    # 'eth_compileSolidity',
    # 'eth_compileSerpent',
    # 'eth_newFilter',
    # 'eth_newBlockFilter',
    # 'eth_newPendingTransactionFilter',
    # 'eth_uninstallFilter',
    # 'eth_getFilterChanges',
    # 'eth_getFilterLogs',
    # 'eth_getLogs',
    # 'eth_getWork',
    # 'eth_submitWork',
    # 'eth_submitHashrate',
}
def _should_cache(method, params, response):
if 'error' in response:
return False
elif 'result' not in response:
return False
if response['result'] is None:
return False
return True
def construct_simple_cache_middleware(
        cache_class,
        rpc_whitelist=SIMPLE_CACHE_RPC_WHITELIST,
        should_cache_fn=_should_cache):
    """
    Constructs a middleware which permanently caches responses, keyed by the
    request ``method`` and ``params``.

    :param cache_class: Factory for any dictionary-like object to cache into.
    :param rpc_whitelist: A set of RPC methods which may have their responses cached.
    :param should_cache_fn: A callable which accepts ``method`` ``params`` and
        ``response`` and returns a boolean as to whether the response should be
        cached.
    """
    def simple_cache_middleware(make_request, web3):
        cache = cache_class()
        lock = threading.Lock()

        def middleware(method, params):
            # never block on the lock: a concurrent caller simply bypasses the cache
            have_lock = lock.acquire(blocking=False)
            try:
                if not (have_lock and method in rpc_whitelist):
                    return make_request(method, params)
                cache_key = generate_cache_key((method, params))
                if cache_key in cache:
                    return cache[cache_key]
                response = make_request(method, params)
                if should_cache_fn(method, params, response):
                    cache[cache_key] = response
                return response
            finally:
                if have_lock:
                    lock.release()
        return middleware
    return simple_cache_middleware
# Ready-to-use instance backed by a 256-entry LRU cache.
_simple_cache_middleware = construct_simple_cache_middleware(
    cache_class=functools.partial(lru.LRU, 256),
)
# RPC methods whose responses change rarely and are therefore safe to cache
# for a short, fixed window. Commented-out entries are deliberately excluded.
TIME_BASED_CACHE_RPC_WHITELIST = {
    # 'web3_clientVersion',
    # 'web3_sha3',
    # 'net_version',
    # 'net_peerCount',
    # 'net_listening',
    # 'eth_protocolVersion',
    # 'eth_syncing',
    'eth_coinbase',
    # 'eth_mining',
    # 'eth_hashrate',
    # 'eth_gasPrice',
    'eth_accounts',
    # 'eth_blockNumber',
    # 'eth_getBalance',
    # 'eth_getStorageAt',
    # 'eth_getTransactionCount',
    # 'eth_getBlockTransactionCountByHash',
    # 'eth_getBlockTransactionCountByNumber',
    # 'eth_getUncleCountByBlockHash',
    # 'eth_getUncleCountByBlockNumber',
    # 'eth_getCode',
    # 'eth_sign',
    # 'eth_sendTransaction',
    # 'eth_sendRawTransaction',
    # 'eth_call',
    # 'eth_estimateGas',
    # 'eth_getBlockByHash',
    # 'eth_getBlockByNumber',
    # 'eth_getTransactionByHash',
    # 'eth_getTransactionByBlockHashAndIndex',
    # 'eth_getTransactionByBlockNumberAndIndex',
    # 'eth_getTransactionReceipt',
    # 'eth_getUncleByBlockHashAndIndex',
    # 'eth_getUncleByBlockNumberAndIndex',
    # 'eth_getCompilers',
    # 'eth_compileLLL',
    # 'eth_compileSolidity',
    # 'eth_compileSerpent',
    # 'eth_newFilter',
    # 'eth_newBlockFilter',
    # 'eth_newPendingTransactionFilter',
    # 'eth_uninstallFilter',
    # 'eth_getFilterChanges',
    # 'eth_getFilterLogs',
    # 'eth_getLogs',
    # 'eth_getWork',
    # 'eth_submitWork',
    # 'eth_submitHashrate',
}
def construct_time_based_cache_middleware(
        cache_class,
        cache_expire_seconds=15,
        rpc_whitelist=TIME_BASED_CACHE_RPC_WHITELIST,
        should_cache_fn=_should_cache):
    """
    Constructs a middleware which caches responses for up to
    ``cache_expire_seconds``, keyed by the request ``method`` and ``params``.

    :param cache_class: Factory for any dictionary-like object to cache into.
    :param cache_expire_seconds: The number of seconds an item may be cached
        before it should expire.
    :param rpc_whitelist: A set of RPC methods which may have their responses cached.
    :param should_cache_fn: A callable which accepts ``method`` ``params`` and
        ``response`` and returns a boolean as to whether the response should be
        cached.
    """
    def time_based_cache_middleware(make_request, web3):
        cache = cache_class()
        lock = threading.Lock()

        def middleware(method, params):
            # never block on the lock: a concurrent caller simply bypasses the cache
            have_lock = lock.acquire(blocking=False)
            try:
                if not (have_lock and method in rpc_whitelist):
                    return make_request(method, params)
                cache_key = generate_cache_key((method, params))
                if cache_key in cache:
                    cached_at, cached_response = cache[cache_key]
                    if time.time() - cached_at <= cache_expire_seconds:
                        return cached_response
                    # entry is stale: evict it and fall through to re-fetch
                    del cache[cache_key]
                response = make_request(method, params)
                if should_cache_fn(method, params, response):
                    cache[cache_key] = (time.time(), response)
                return response
            finally:
                if have_lock:
                    lock.release()
        return middleware
    return time_based_cache_middleware
# Ready-to-use instance backed by a 256-entry LRU cache.
_time_based_cache_middleware = construct_time_based_cache_middleware(
    cache_class=functools.partial(lru.LRU, 256),
)
# RPC methods whose responses only change when a new block arrives; cached
# against the latest block hash. Commented-out entries are deliberately excluded.
BLOCK_NUMBER_RPC_WHITELIST = {
    # 'web3_clientVersion',
    # 'web3_sha3',
    # 'net_version',
    # 'net_peerCount',
    # 'net_listening',
    # 'eth_protocolVersion',
    # 'eth_syncing',
    # 'eth_coinbase',
    # 'eth_mining',
    # 'eth_hashrate',
    'eth_gasPrice',
    # 'eth_accounts',
    'eth_blockNumber',
    'eth_getBalance',
    'eth_getStorageAt',
    'eth_getTransactionCount',
    # 'eth_getBlockTransactionCountByHash',
    'eth_getBlockTransactionCountByNumber',
    # 'eth_getUncleCountByBlockHash',
    'eth_getUncleCountByBlockNumber',
    'eth_getCode',
    # 'eth_sign',
    # 'eth_sendTransaction',
    # 'eth_sendRawTransaction',
    'eth_call',
    'eth_estimateGas',
    # 'eth_getBlockByHash',
    'eth_getBlockByNumber',
    # 'eth_getTransactionByHash',
    # 'eth_getTransactionByBlockHashAndIndex',
    'eth_getTransactionByBlockNumberAndIndex',
    'eth_getTransactionReceipt',
    # 'eth_getUncleByBlockHashAndIndex',
    'eth_getUncleByBlockNumberAndIndex',
    # 'eth_getCompilers',
    # 'eth_compileLLL',
    # 'eth_compileSolidity',
    # 'eth_compileSerpent',
    # 'eth_newFilter',
    # 'eth_newBlockFilter',
    # 'eth_newPendingTransactionFilter',
    # 'eth_uninstallFilter',
    # 'eth_getFilterChanges',
    # 'eth_getFilterLogs',
    'eth_getLogs',
    # 'eth_getWork',
    # 'eth_submitWork',
    # 'eth_submitHashrate',
}
# Keys into the per-middleware ``block_info`` bookkeeping dict.
AVG_BLOCK_TIME_KEY = 'avg_block_time'
AVG_BLOCK_SAMPLE_SIZE_KEY = 'avg_block_sample_size'
AVG_BLOCK_TIME_UPDATED_AT_KEY = 'avg_block_time_updated_at'
def _is_latest_block_number_request(method, params):
if method != 'eth_getBlockByNumber':
return False
elif params[:1] == ['latest']:
return True
return False
def construct_latest_block_based_cache_middleware(
        cache_class,
        rpc_whitelist=BLOCK_NUMBER_RPC_WHITELIST,
        average_block_time_sample_size=240,
        default_average_block_time=15,
        should_cache_fn=_should_cache):
    """
    Constructs a middleware which caches responses based on the request
    ``method``, ``params``, and the current latest block hash.

    :param cache_class: zero-argument callable that returns the
        dictionary-like object used as the response cache.
    :param rpc_whitelist: A set of RPC methods which may have their responses cached.
    :param average_block_time_sample_size: number of trailing blocks used
        when estimating the average block time.
    :param default_average_block_time: fallback average block time, in
        seconds, used before any sample has been taken.
    :param should_cache_fn: A callable which accepts ``method`` ``params`` and
        ``response`` and returns a boolean as to whether the response should be
        cached.

    .. note::
        This middleware avoids re-fetching the current latest block for each
        request by tracking the current average block time and only requesting
        a new block when the last seen latest block is older than the average
        block time.
    """
    def latest_block_based_cache_middleware(make_request, web3):
        cache = cache_class()
        # Holds the avg-block-time estimate and the last seen latest block;
        # mutated only while the (non-blocking) lock below is held.
        block_info = {}

        def _update_block_info_cache():
            avg_block_time = block_info.get(AVG_BLOCK_TIME_KEY, default_average_block_time)
            avg_block_sample_size = block_info.get(AVG_BLOCK_SAMPLE_SIZE_KEY, 0)
            avg_block_time_updated_at = block_info.get(AVG_BLOCK_TIME_UPDATED_AT_KEY, 0)

            # compute age as counted by number of blocks since the avg_block_time
            if avg_block_time == 0:
                avg_block_time_age_in_blocks = avg_block_sample_size
            else:
                avg_block_time_age_in_blocks = (
                    (time.time() - avg_block_time_updated_at) / avg_block_time
                )

            if avg_block_time_age_in_blocks >= avg_block_sample_size:
                # If the length of time since the average block time as
                # measured by blocks is greater than or equal to the number of
                # blocks sampled then we need to recompute the average block
                # time.
                latest_block = web3.eth.getBlock('latest')
                ancestor_block_number = max(
                    0,
                    latest_block['number'] - average_block_time_sample_size,
                )
                ancestor_block = web3.eth.getBlock(ancestor_block_number)
                sample_size = latest_block['number'] - ancestor_block_number

                block_info[AVG_BLOCK_SAMPLE_SIZE_KEY] = sample_size
                if sample_size != 0:
                    block_info[AVG_BLOCK_TIME_KEY] = (
                        (latest_block['timestamp'] - ancestor_block['timestamp']) / sample_size
                    )
                else:
                    # Near genesis there is no span to average over; keep the
                    # previous estimate.
                    block_info[AVG_BLOCK_TIME_KEY] = avg_block_time
                block_info[AVG_BLOCK_TIME_UPDATED_AT_KEY] = time.time()

            if 'latest_block' in block_info:
                latest_block = block_info['latest_block']
                time_since_latest_block = time.time() - latest_block['timestamp']

                # latest block is too old so update cache
                if time_since_latest_block > avg_block_time:
                    block_info['latest_block'] = web3.eth.getBlock('latest')
            else:
                # latest block has not been fetched so we fetch it.
                block_info['latest_block'] = web3.eth.getBlock('latest')

        lock = threading.Lock()

        def middleware(method, params):
            # Non-blocking acquire: concurrent callers simply bypass the
            # cache instead of waiting on each other.
            lock_acquired = lock.acquire(blocking=False)
            try:
                should_try_cache = (
                    lock_acquired and
                    method in rpc_whitelist and
                    not _is_latest_block_number_request(method, params)
                )
                if should_try_cache:
                    _update_block_info_cache()
                    latest_block_hash = block_info['latest_block']['hash']
                    cache_key = generate_cache_key((latest_block_hash, method, params))
                    if cache_key in cache:
                        return cache[cache_key]

                    response = make_request(method, params)
                    if should_cache_fn(method, params, response):
                        cache[cache_key] = response
                    return response
                else:
                    return make_request(method, params)
            finally:
                if lock_acquired:
                    lock.release()
        return middleware
    return latest_block_based_cache_middleware
# Default instantiation: responses cached in an LRU bounded to 256 entries.
_latest_block_based_cache_middleware = construct_latest_block_based_cache_middleware(
    cache_class=functools.partial(lru.LRU, 256),
    rpc_whitelist=BLOCK_NUMBER_RPC_WHITELIST,
)
from eth_utils.curried import (
apply_formatter_at_index,
apply_formatter_if,
apply_formatters_to_dict,
is_null,
)
from hexbytes import (
HexBytes,
)
from web3._utils.toolz import (
complement,
compose,
curry,
dissoc,
)
from web3.exceptions import (
ValidationError,
)
from web3.middleware.formatting import (
construct_web3_formatting_middleware,
)
# Maximum byte length of a block's extraData field on mainline Ethereum;
# proof-of-authority chains exceed this, which check_extradata_length detects.
MAX_EXTRADATA_LENGTH = 32

# Predicate: true for any value that is not null/None.
is_not_null = complement(is_null)
@curry
def validate_chain_id(web3, chain_id):
    """Ensure a transaction's declared ``chainId`` matches the connected node.

    :param web3: Web3 instance whose node chain id is consulted.
    :param chain_id: the chain id declared on the transaction.
    :return: ``chain_id`` unchanged when it matches.
    :raises ValidationError: when the declared id differs from the node's.
    """
    # BUG FIX: the original hardcoded "UNKNOWN" in the error message even
    # though it had just fetched the node's chain id for the comparison.
    # Fetch once and reuse it both for the check and for the message.
    node_chain_id = web3.net.chainId
    if chain_id == node_chain_id:
        return chain_id
    else:
        raise ValidationError(
            "The transaction declared chain ID %r, "
            "but the connected node is on %r" % (
                chain_id,
                node_chain_id,
            )
        )
def check_extradata_length(val):
    """Raise ValidationError when a block's extraData exceeds 32 bytes.

    Oversized extraData is the tell-tale sign of a POA chain; non-bytes-like
    values pass through untouched.
    """
    if not isinstance(val, (str, int, bytes)):
        return val
    as_bytes = HexBytes(val)
    if len(as_bytes) <= MAX_EXTRADATA_LENGTH:
        return val
    raise ValidationError(
        "The field extraData is %d bytes, but should be %d. "
        "It is quite likely that you are connected to a POA chain. "
        "Refer "
        "http://web3py.readthedocs.io/en/stable/middleware.html#geth-style-proof-of-authority "
        "for more details. The full extraData is: %r" % (
            len(as_bytes), MAX_EXTRADATA_LENGTH, as_bytes
        )
    )
def transaction_normalizer(transaction):
    """Return a copy of the transaction dict with the 'chainId' key removed."""
    return {key: value for key, value in transaction.items() if key != 'chainId'}
def transaction_param_validator(web3):
    """Build a formatter that validates the first request param (the tx dict)."""
    def node_chain_id_is_known(_):
        # Bypass `validate_chain_id` if chainId can't be determined
        return is_not_null(web3.net.chainId)

    validators_by_field = {
        'chainId': apply_formatter_if(
            node_chain_id_is_known,
            validate_chain_id(web3)
        ),
    }
    return apply_formatter_at_index(
        apply_formatters_to_dict(validators_by_field),
        0
    )
# Per-field validators applied to blocks returned by the node.
BLOCK_VALIDATORS = {
    'extraData': check_extradata_length,
}

# Only run the field validators on non-null block responses.
block_validator = apply_formatter_if(
    is_not_null,
    apply_formatters_to_dict(BLOCK_VALIDATORS)
)
@curry
def chain_id_validator(web3):
    """Validate a transaction's chainId, then strip it before it leaves.

    compose() applies right-to-left: validation runs first, normalization
    (removing 'chainId') second.
    """
    strip_chain_id = apply_formatter_at_index(transaction_normalizer, 0)
    validate_params = transaction_param_validator(web3)
    return compose(strip_chain_id, validate_params)
def build_validators_with_web3(w3):
    """Return the request/result formatter maps, bound to *w3*, that the
    validation middleware installs."""
    return {
        'request_formatters': {
            'eth_sendTransaction': chain_id_validator(w3),
            'eth_estimateGas': chain_id_validator(w3),
            'eth_call': chain_id_validator(w3),
        },
        'result_formatters': {
            'eth_getBlockByHash': block_validator,
            'eth_getBlockByNumber': block_validator,
        },
    }
validation_middleware = construct_web3_formatting_middleware(build_validators_with_web3) | 0x-web3 | /0x-web3-5.0.0a5.tar.gz/0x-web3-5.0.0a5/web3/middleware/validation.py | validation.py |
import itertools
import os
from eth_utils import (
apply_key_map,
to_hex,
to_list,
)
from web3._utils.toolz import (
concat,
valfilter,
)
# Maximum number of blocks covered by one eth_getLogs call; override with
# the WEB3_MAX_BLOCK_REQUEST environment variable.
if 'WEB3_MAX_BLOCK_REQUEST' in os.environ:
    # BUG FIX: environment variables are strings; this value is used as an
    # integer range step in block_ranges(), so it must be parsed.
    MAX_BLOCK_REQUEST = int(os.environ['WEB3_MAX_BLOCK_REQUEST'])
else:
    MAX_BLOCK_REQUEST = 50
def segment_count(start, stop, step=5):
    """Creates a segment counting generator

    The generator returns tuple pairs of integers
    that correspond to segments in the provided range.

    :param start: The initial value of the counting range
    :param stop: The last value in the
        counting range
    :param step: Optional, the segment length. Default is 5.
    :type start: int
    :type stop: int
    :return: returns a generator object

    Example:

    >>> segment_counter = segment_count(start=0, stop=10, step=3)
    >>> next(segment_counter)
    (0, 3)
    >>> next(segment_counter)
    (3, 6)
    >>> next(segment_counter)
    (6, 9)
    >>> next(segment_counter)  # Remainder is also returned
    (9, 10)
    """
    # Thin wrapper kept for API stability; the logic lives in
    # gen_bounded_segments below.
    return gen_bounded_segments(start, stop, step)
def gen_bounded_segments(start, stop, step):
    """Yield (lower, upper) integer pairs of width *step* spanning start..stop,
    with a final shorter remainder segment when step does not divide evenly."""
    # If the initial range is less than the step, just return (start, stop).
    if start + step >= stop:
        yield (start, stop)
        return
    lower = start
    while lower + step <= stop:
        yield (lower, lower + step)
        lower += step
    # Handle the remainder.
    if lower < stop:
        yield (lower, stop)
def block_ranges(start_block, last_block, step=5):
    """Returns 2-tuple ranges describing ranges of block from start_block to last_block

    Ranges do not overlap to facilitate use as ``toBlock``, ``fromBlock``
    json-rpc arguments, which are both inclusive.
    """
    # Validate eagerly, at call time, before the generator is consumed.
    if last_block is not None and start_block > last_block:
        raise TypeError(
            "Incompatible start and stop arguments.",
            "Start must be less than or equal to stop.")

    def _inclusive_pairs():
        for lower, upper in segment_count(start_block, last_block + 1, step):
            yield (lower, upper - 1)

    return _inclusive_pairs()
def iter_latest_block(w3, to_block=None):
    """Returns a generator that dispenses the latest block, if
    any new blocks have been mined since last iteration.

    If there are no new blocks None is returned.

    If ``to_block`` is defined, ``StopIteration`` is raised
    after to_block is reached.

    >>> new_blocks = iter_latest_block(w3, 10)
    >>> next(new_blocks)  # Latest block = 0
    0
    >>> next(new_blocks)  # No new blocks
    >>> next(new_blocks)  # Latest block = 1
    1
    >>> next(new_blocks)  # Latest block = 10
    10
    >>> next(new_blocks)
    Traceback (most recent call last):
        File "<stdin>", line 1, in <module>
    StopIteration
    >>>
    """
    _last = None

    # BUG FIX: was `to_block is not 'latest'` — identity comparison against a
    # string literal, whose outcome depends on interpreter string interning
    # (and raises SyntaxWarning on modern CPython). Use equality.
    is_bounded_range = (
        to_block is not None and
        to_block != 'latest'
    )

    while True:
        latest_block = w3.eth.blockNumber
        if is_bounded_range and latest_block > to_block:
            return
        # No new blocks since last iteration.
        if _last is not None and _last == latest_block:
            yield None
        else:
            yield latest_block
        _last = latest_block
def iter_latest_block_ranges(w3, from_block, to_block=None):
    """Yield (from_block, latest) ranges of newly available blocks,
    starting at *from_block* and stopping once *to_block* is reached.

    Emits (None, None) whenever no new blocks have appeared since the
    previous iteration, e.g.:

    >>> blocks_to_filter = iter_latest_block_ranges(w3, 0, 50)
    >>> next(blocks_to_filter)  # latest block number = 11
    (0, 11)
    >>> next(blocks_to_filter)  # latest block number = 45
    (12, 45)
    >>> next(blocks_to_filter)  # latest block number = 50
    (46, 50)
    """
    for latest_block in iter_latest_block(w3, to_block):
        if latest_block is None or from_block > latest_block:
            yield (None, None)
        else:
            yield (from_block, latest_block)
            from_block = latest_block + 1
def drop_items_with_none_value(params):
    """Return a copy of *params* without the keys whose value is None."""
    return {key: value for key, value in params.items() if value is not None}
def get_logs_multipart(
        w3,
        startBlock,
        stopBlock,
        address,
        topics,
        max_blocks):
    """Used to break up requests to ``eth_getLogs``

    The getLog request is partitioned into multiple calls of the max number of blocks
    ``max_blocks``.
    """
    for from_block, to_block in block_ranges(startBlock, stopBlock, max_blocks):
        raw_params = {
            'fromBlock': from_block,
            'toBlock': to_block,
            'address': address,
            'topics': topics,
        }
        yield w3.eth.getLogs(drop_items_with_none_value(raw_params))
class RequestLogs:
    """Locally tracked ``eth_newFilter``: fetches matching logs over
    plain ``eth_getLogs`` calls instead of relying on node-side filters."""

    def __init__(
            self,
            w3,
            from_block=None,
            to_block=None,
            address=None,
            topics=None):
        self.address = address
        self.topics = topics
        self.w3 = w3
        # A filter created "from latest" should only match blocks mined
        # after creation, so start one past the current head.
        if from_block is None or from_block == 'latest':
            self._from_block = w3.eth.blockNumber + 1
        else:
            self._from_block = from_block
        self._to_block = to_block
        self.filter_changes = self._get_filter_changes()

    @property
    def from_block(self):
        return self._from_block

    @property
    def to_block(self):
        # 'latest' (or unset) resolves to the current head on every access.
        if self._to_block is None or self._to_block == 'latest':
            return self.w3.eth.blockNumber
        return self._to_block

    def _get_filter_changes(self):
        for start, stop in iter_latest_block_ranges(self.w3, self.from_block, self.to_block):
            if None in (start, stop):
                # No new blocks since the last poll.
                yield []
            else:
                # BUG FIX: this branch must be mutually exclusive with the
                # empty-range case above; previously the fetch also ran for
                # (None, None) ranges and crashed inside block_ranges().
                yield list(
                    concat(
                        get_logs_multipart(
                            self.w3,
                            start,
                            stop,
                            self.address,
                            self.topics,
                            max_blocks=MAX_BLOCK_REQUEST)))

    def get_logs(self):
        """Fetch every log matching the filter over its whole block range."""
        return list(
            concat(
                get_logs_multipart(
                    self.w3,
                    self.from_block,
                    self.to_block,
                    self.address,
                    self.topics,
                    max_blocks=MAX_BLOCK_REQUEST)))
# Maps JSON-RPC filter-parameter names onto RequestLogs keyword arguments.
FILTER_PARAMS_KEY_MAP = {
    'toBlock': 'to_block',
    'fromBlock': 'from_block'
}

# RPC methods that create a new locally tracked filter.
NEW_FILTER_METHODS = {
    'eth_newBlockFilter',
    'eth_newFilter',
}

# RPC methods that poll a previously created filter.
FILTER_CHANGES_METHODS = {
    'eth_getFilterChanges',
    'eth_getFilterLogs',
}
class RequestBlocks:
    """Locally tracked ``eth_newBlockFilter``: yields hashes of newly
    mined blocks on each poll."""

    def __init__(self, w3):
        self.w3 = w3
        # Only report blocks mined after the filter was created.
        self.start_block = w3.eth.blockNumber + 1

    @property
    def filter_changes(self):
        return self.get_filter_changes()

    def get_filter_changes(self):
        for block_range in iter_latest_block_ranges(self.w3, self.start_block, None):
            yield block_hashes_in_range(self.w3, block_range)
@to_list
def block_hashes_in_range(w3, block_range):
    """Return the hash of every block in the inclusive *block_range*.

    Blocks lacking a ``hash`` attribute contribute None.
    """
    from_block, to_block = block_range
    # BUG FIX: iter_latest_block_ranges produces (None, None) when no new
    # blocks are available; return an empty list instead of crashing in
    # range() below.
    if from_block is None or to_block is None:
        return
    for block_number in range(from_block, to_block + 1):
        yield getattr(w3.eth.getBlock(block_number), 'hash', None)
def local_filter_middleware(make_request, w3):
    """Emulate filter RPCs client-side, for nodes without filter support."""
    filters = {}
    filter_id_counter = map(to_hex, itertools.count())

    def middleware(method, params):
        if method in NEW_FILTER_METHODS:
            filter_id = next(filter_id_counter)
            if method == 'eth_newFilter':
                created = RequestLogs(w3, **apply_key_map(FILTER_PARAMS_KEY_MAP, params[0]))
            elif method == 'eth_newBlockFilter':
                created = RequestBlocks(w3)
            else:
                raise NotImplementedError(method)
            filters[filter_id] = created
            return {'result': filter_id}

        if method in FILTER_CHANGES_METHODS:
            filter_id = params[0]
            # Pass through to filters not created by middleware
            if filter_id not in filters:
                return make_request(method, params)
            tracked = filters[filter_id]
            if method == 'eth_getFilterChanges':
                return {'result': next(tracked.filter_changes)}
            if method == 'eth_getFilterLogs':
                return {'result': tracked.get_logs()}
            raise NotImplementedError(method)

        return make_request(method, params)

    return middleware
import functools
from .abi import ( # noqa: F401
abi_middleware,
)
from .attrdict import ( # noqa: F401
attrdict_middleware,
)
from .cache import ( # noqa: F401
construct_simple_cache_middleware,
construct_time_based_cache_middleware,
construct_latest_block_based_cache_middleware,
_simple_cache_middleware as simple_cache_middleware,
_time_based_cache_middleware as time_based_cache_middleware,
_latest_block_based_cache_middleware as latest_block_based_cache_middleware,
)
from .exception_handling import ( # noqa: F401
construct_exception_handler_middleware,
)
from .filter import ( # noqa: F401
local_filter_middleware,
)
from .fixture import ( # noqa: F401
construct_fixture_middleware,
construct_result_generator_middleware,
construct_error_generator_middleware,
)
from .formatting import ( # noqa: F401
construct_formatting_middleware,
)
from .gas_price_strategy import ( # noqa: F401
gas_price_strategy_middleware,
)
from .names import ( # noqa: F401
name_to_address_middleware,
)
from .normalize_errors import ( # noqa: F401
normalize_errors_middleware,
)
from .normalize_request_parameters import ( # noqa: F401
request_parameter_normalizer,
)
from .pythonic import ( # noqa: F401
pythonic_middleware,
)
from .stalecheck import ( # noqa: F401
make_stalecheck_middleware,
)
from .exception_retry_request import ( # noqa: F401
http_retry_request_middleware
)
from .geth_poa import ( # noqa: F401
geth_poa_middleware,
)
from .validation import ( # noqa: F401
validation_middleware,
)
from .signing import ( # noqa: F401
construct_sign_and_send_raw_middleware,
)
def combine_middlewares(middlewares, web3, provider_request_fn):
    """
    Returns a callable function which will call the provider.provider_request
    function wrapped with all of the middlewares.
    """
    # Wrap from the inside out so the first middleware in the sequence is
    # the outermost (first to run) at request time.
    request_fn = provider_request_fn
    for middleware in reversed(middlewares):
        request_fn = middleware(request_fn, web3)
    return request_fn
from requests.exceptions import (
ConnectionError,
HTTPError,
Timeout,
TooManyRedirects,
)
# RPC namespaces and methods considered safe to retry: read-only or
# idempotent. 'eth_sendTransaction' is deliberately absent — retrying it
# could broadcast the same transaction twice.
whitelist = [
    'admin',
    'shh',
    'miner',
    'net',
    'txpool',  # BUG FIX: a missing comma fused this with 'testing' into 'txpooltesting'
    'testing',
    'evm',
    'eth_protocolVersion',
    'eth_syncing',
    'eth_coinbase',
    'eth_mining',
    'eth_hashrate',
    'eth_gasPrice',
    'eth_accounts',
    'eth_blockNumber',
    'eth_getBalance',
    'eth_getStorageAt',
    'eth_getCode',
    'eth_getBlockByNumber',
    'eth_getBlockByHash',
    'eth_getBlockTransactionCountByNumber',
    'eth_getBlockTransactionCountByHash',
    'eth_getUncleCountByBlockNumber',
    'eth_getUncleCountByBlockHash',
    'eth_getTransactionByHash',
    'eth_getTransactionByBlockHashAndIndex',
    'eth_getTransactionByBlockNumberAndIndex',
    'eth_getTransactionReceipt',
    'eth_getTransactionCount',
    'eth_call',
    'eth_estimateGas',
    'eth_newBlockFilter',
    'eth_newPendingTransactionFilter',
    'eth_newFilter',
    'eth_getFilterChanges',
    'eth_getFilterLogs',
    'eth_getLogs',
    'eth_uninstallFilter',
    'eth_getCompilers',
    'eth_getWork',
    'eth_sign',
    'eth_sendRawTransaction',
    'personal_importRawKey',
    'personal_newAccount',
    'personal_listAccounts',
    'personal_lockAccount',
    'personal_unlockAccount',
    'personal_ecRecover',
    'personal_sign'
]
def check_if_retry_on_failure(method):
    """True when *method* may be retried: its namespace prefix or its full
    name appears in the whitelist."""
    namespace = method.split('_')[0]
    return namespace in whitelist or method in whitelist
def exception_retry_middleware(make_request, web3, errors, retries=5):
    """
    Creates middleware that retries failed HTTP requests. Is a default
    middleware for HTTPProvider.
    """
    def middleware(method, params):
        # Non-whitelisted methods are never retried.
        if not check_if_retry_on_failure(method):
            return make_request(method, params)
        last_attempt = retries - 1
        for attempt in range(retries):
            try:
                return make_request(method, params)
            except errors:
                # Re-raise only once the attempts are exhausted.
                if attempt == last_attempt:
                    raise
    return middleware
def http_retry_request_middleware(make_request, web3):
    """Default HTTPProvider middleware: retry on transient transport errors."""
    recoverable_errors = (ConnectionError, HTTPError, Timeout, TooManyRedirects)
    return exception_retry_middleware(make_request, web3, recoverable_errors)
from functools import (
singledispatch,
)
import operator
from eth_account import (
Account,
)
from eth_account.local import (
LocalAccount,
)
from eth_keys.datatypes import (
PrivateKey,
)
from eth_utils import (
to_dict,
)
from web3._utils.formatters import (
apply_formatter_if,
)
from web3._utils.rpc_abi import (
TRANSACTION_PARAMS_ABIS,
apply_abi_formatters_to_dict,
)
from web3._utils.toolz import (
compose,
)
from web3._utils.transactions import (
fill_nonce,
fill_transaction_defaults,
)
from .abi import (
STANDARD_NORMALIZERS,
)
to_hexstr_from_eth_key = operator.methodcaller('to_hex')
def is_eth_key(value):
    # True when *value* is an eth_keys PrivateKey instance.
    return isinstance(value, PrivateKey)
# Normalizes a key: PrivateKey objects become hex strings; any other value
# passes through unchanged.
key_normalizer = compose(
    apply_formatter_if(is_eth_key, to_hexstr_from_eth_key),
)
@to_dict
def gen_normalized_accounts(val):
    """Map each supplied key (or a single bare key) to its LocalAccount,
    keyed by the account's address."""
    keys = val if isinstance(val, (list, tuple, set)) else [val]
    for key in keys:
        account = to_account(key)
        yield account.address, account
@singledispatch
def to_account(val):
    """Convert *val* into a LocalAccount; concrete key types are registered
    separately. The base implementation rejects unsupported types."""
    message = (
        "key must be one of the types: "
        "eth_keys.datatype.PrivateKey, eth_account.local.LocalAccount, "
        "or raw private key as a hex string or byte string. "
        "Was of type {0}".format(type(val)))
    raise TypeError(message)
@to_account.register(LocalAccount)
def _(val):
    # LocalAccount instances are already in the target form.
    return val
def private_key_to_account(val):
    """Normalize a raw or eth_keys private key and wrap it in a LocalAccount."""
    return Account.privateKeyToAccount(key_normalizer(val))
# All raw key formats funnel through private_key_to_account.
to_account.register(PrivateKey, private_key_to_account)
to_account.register(str, private_key_to_account)
to_account.register(bytes, private_key_to_account)
def format_transaction(transaction):
    """Format transaction so that it can be used correctly in the signing middleware.

    Converts bytes to hex strings and other types that can be passed to the underlying layers.
    Also has the effect of normalizing 'from' for easier comparisons.
    """
    return apply_abi_formatters_to_dict(STANDARD_NORMALIZERS, TRANSACTION_PARAMS_ABIS, transaction)
def construct_sign_and_send_raw_middleware(private_key_or_account):
    """Capture transactions sign and send as raw transactions

    Keyword arguments:
    private_key_or_account -- A single private key or a tuple,
    list or set of private keys. Keys can be any of the following formats:
      - An eth_account.LocalAccount object
      - An eth_keys.PrivateKey object
      - A raw private key as a hex string or byte string
    """
    accounts = gen_normalized_accounts(private_key_or_account)

    def sign_and_send_raw_middleware(make_request, w3):
        # compose() applies right-to-left: nonce first, then defaults,
        # then formatting/normalization.
        format_and_fill_tx = compose(
            format_transaction,
            fill_transaction_defaults(w3),
            fill_nonce(w3))

        def middleware(method, params):
            if method != "eth_sendTransaction":
                return make_request(method, params)
            transaction = format_and_fill_tx(params[0])
            sender = transaction.get('from')
            # Only intercept transactions whose sender we hold a key for.
            if sender is None or sender not in accounts:
                return make_request(method, params)
            raw_tx = accounts[sender].signTransaction(transaction).rawTransaction
            return make_request(
                "eth_sendRawTransaction",
                [raw_tx])

        return middleware
    return sign_and_send_raw_middleware
import logging
import os
from eth_utils import (
to_dict,
)
from web3._utils.http import (
construct_user_agent,
)
from web3._utils.request import (
make_post_request,
)
from web3.datastructures import (
NamedElementOnion,
)
from web3.middleware import (
http_retry_request_middleware,
)
from .base import (
JSONBaseProvider,
)
def get_default_endpoint():
    """HTTP endpoint URI, honoring the WEB3_HTTP_PROVIDER_URI env var."""
    configured = os.environ.get('WEB3_HTTP_PROVIDER_URI')
    if configured is None:
        return 'http://localhost:8545'
    return configured
class HTTPProvider(JSONBaseProvider):
    """Provider that speaks JSON-RPC over HTTP(S)."""
    logger = logging.getLogger("web3.providers.HTTPProvider")
    endpoint_uri = None
    _request_args = None
    _request_kwargs = None
    _middlewares = NamedElementOnion([(http_retry_request_middleware, 'http_retry_request')])

    def __init__(self, endpoint_uri=None, request_kwargs=None):
        self.endpoint_uri = get_default_endpoint() if endpoint_uri is None else endpoint_uri
        self._request_kwargs = request_kwargs or {}
        super().__init__()

    def __str__(self):
        return "RPC connection {0}".format(self.endpoint_uri)

    @to_dict
    def get_request_kwargs(self):
        # Supply default headers unless the caller already overrode them.
        if 'headers' not in self._request_kwargs:
            yield 'headers', self.get_request_headers()
        yield from self._request_kwargs.items()

    def get_request_headers(self):
        return {
            'Content-Type': 'application/json',
            'User-Agent': construct_user_agent(str(type(self))),
        }

    def make_request(self, method, params):
        self.logger.debug("Making request HTTP. URI: %s, Method: %s",
                          self.endpoint_uri, method)
        request_data = self.encode_rpc_request(method, params)
        raw_response = make_post_request(
            self.endpoint_uri,
            request_data,
            **self.get_request_kwargs()
        )
        response = self.decode_rpc_response(raw_response)
        self.logger.debug("Getting response HTTP. URI: %s, "
                          "Method: %s, Response: %s",
                          self.endpoint_uri, method, response)
        return response
import itertools
from eth_utils import (
to_bytes,
to_text,
)
from web3._utils.encoding import (
FriendlyJsonSerde,
)
from web3.middleware import (
combine_middlewares,
)
class BaseProvider:
    """Abstract transport: subclasses implement make_request/isConnected."""
    _middlewares = ()
    # a tuple of (all_middlewares, request_func)
    _request_func_cache = (None, None)

    @property
    def middlewares(self):
        return self._middlewares

    @middlewares.setter
    def middlewares(self, values):
        self._middlewares = tuple(values)

    def request_func(self, web3, outer_middlewares):
        """
        @param outer_middlewares is an iterable of middlewares, ordered by first to execute
        @returns a function that calls all the middleware and eventually self.make_request()
        """
        all_middlewares = tuple(outer_middlewares) + tuple(self.middlewares)
        cached_key, cached_fn = self._request_func_cache
        if cached_key is not None and cached_key == all_middlewares:
            return cached_fn
        # Middleware set changed (or first call): rebuild and memoize.
        request_fn = self._generate_request_func(web3, all_middlewares)
        self._request_func_cache = (all_middlewares, request_fn)
        return request_fn

    def _generate_request_func(self, web3, middlewares):
        return combine_middlewares(
            middlewares=middlewares,
            web3=web3,
            provider_request_fn=self.make_request,
        )

    def make_request(self, method, params):
        raise NotImplementedError("Providers must implement this method")

    def isConnected(self):
        raise NotImplementedError("Providers must implement this method")
class JSONBaseProvider(BaseProvider):
    """Base for providers that serialize requests/responses as JSON-RPC."""

    def __init__(self):
        # Monotonic request ids, one per encoded request.
        self.request_counter = itertools.count()

    def decode_rpc_response(self, response):
        return FriendlyJsonSerde().json_decode(to_text(response))

    def encode_rpc_request(self, method, params):
        serialized = FriendlyJsonSerde().json_encode({
            "jsonrpc": "2.0",
            "method": method,
            "params": params or [],
            "id": next(self.request_counter),
        })
        return to_bytes(text=serialized)

    def isConnected(self):
        # Probe the node with a harmless request; any I/O failure means "down".
        try:
            response = self.make_request('web3_clientVersion', [])
        except IOError:
            return False
        assert response['jsonrpc'] == '2.0'
        assert 'error' not in response
        return True
import asyncio
import json
import logging
import os
from threading import (
Thread,
)
import websockets
from web3.exceptions import (
ValidationError,
)
from web3.providers.base import (
JSONBaseProvider,
)
# Keyword arguments the provider passes itself; callers may not override them.
RESTRICTED_WEBSOCKET_KWARGS = {'uri', 'loop'}
# Seconds to wait for a websocket send/receive before giving up.
DEFAULT_WEBSOCKET_TIMEOUT = 10
def _start_event_loop(loop):
    # Runs on a dedicated thread: serve the loop until it is stopped, then
    # dispose of it.
    asyncio.set_event_loop(loop)
    loop.run_forever()
    loop.close()
def _get_threaded_loop():
    """Create an event loop and run it forever on a daemon thread."""
    threaded_loop = asyncio.new_event_loop()
    Thread(target=_start_event_loop, args=(threaded_loop,), daemon=True).start()
    return threaded_loop
def get_default_endpoint():
    """Websocket endpoint URI, honoring the WEB3_WS_PROVIDER_URI env var."""
    configured = os.environ.get('WEB3_WS_PROVIDER_URI')
    if configured is None:
        return 'ws://127.0.0.1:8546'
    return configured
class PersistentWebSocket:
    """Async context manager that lazily opens and then reuses one websocket."""

    def __init__(self, endpoint_uri, loop, websocket_kwargs):
        self.ws = None
        self.endpoint_uri = endpoint_uri
        self.loop = loop
        self.websocket_kwargs = websocket_kwargs

    async def __aenter__(self):
        # Connect on first use only; subsequent entries reuse the socket.
        if self.ws is None:
            self.ws = await websockets.connect(
                uri=self.endpoint_uri, loop=self.loop, **self.websocket_kwargs
            )
        return self.ws

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Drop the connection after an error so the next use reconnects.
        if exc_val is None:
            return
        try:
            await self.ws.close()
        except Exception:
            pass
        self.ws = None
class WebsocketProvider(JSONBaseProvider):
    """Provider that speaks JSON-RPC over a persistent websocket.

    All instances share one background thread running a single asyncio event
    loop; each request is handed to that loop and awaited synchronously.
    """
    logger = logging.getLogger("web3.providers.WebsocketProvider")
    # Class-level: the shared background event loop, created lazily on first use.
    _loop = None

    def __init__(
            self,
            endpoint_uri=None,
            websocket_kwargs=None,
            websocket_timeout=DEFAULT_WEBSOCKET_TIMEOUT
    ):
        self.endpoint_uri = endpoint_uri
        self.websocket_timeout = websocket_timeout
        if self.endpoint_uri is None:
            self.endpoint_uri = get_default_endpoint()
        if WebsocketProvider._loop is None:
            WebsocketProvider._loop = _get_threaded_loop()
        if websocket_kwargs is None:
            websocket_kwargs = {}
        else:
            # Reject kwargs that would clash with the ones we pass ourselves.
            found_restricted_keys = set(websocket_kwargs.keys()).intersection(
                RESTRICTED_WEBSOCKET_KWARGS
            )
            if found_restricted_keys:
                raise ValidationError(
                    '{0} are not allowed in websocket_kwargs, '
                    'found: {1}'.format(RESTRICTED_WEBSOCKET_KWARGS, found_restricted_keys)
                )
        self.conn = PersistentWebSocket(
            self.endpoint_uri, WebsocketProvider._loop, websocket_kwargs
        )
        super().__init__()

    def __str__(self):
        return "WS connection {0}".format(self.endpoint_uri)

    async def coro_make_request(self, request_data):
        # Runs on the shared loop; both the send and the receive honor the
        # configured timeout.
        async with self.conn as conn:
            await asyncio.wait_for(
                conn.send(request_data),
                timeout=self.websocket_timeout
            )
            return json.loads(
                await asyncio.wait_for(
                    conn.recv(),
                    timeout=self.websocket_timeout
                )
            )

    def make_request(self, method, params):
        self.logger.debug("Making request WebSocket. URI: %s, "
                          "Method: %s", self.endpoint_uri, method)
        request_data = self.encode_rpc_request(method, params)
        # Hand the coroutine to the shared loop's thread and block on its result.
        future = asyncio.run_coroutine_threadsafe(
            self.coro_make_request(request_data),
            WebsocketProvider._loop
        )
        return future.result()
import os
from urllib.parse import (
urlparse,
)
from web3.exceptions import (
CannotHandleRequest,
)
from web3.providers import (
BaseProvider,
HTTPProvider,
IPCProvider,
WebsocketProvider,
)
# URI schemes handled by HTTPProvider and WebsocketProvider respectively.
HTTP_SCHEMES = {'http', 'https'}
WS_SCHEMES = {'ws', 'wss'}
def load_provider_from_environment():
    """Build a provider from WEB3_PROVIDER_URI, or None when unset/empty."""
    uri_string = os.environ.get('WEB3_PROVIDER_URI', '')
    return load_provider_from_uri(uri_string) if uri_string else None
def load_provider_from_uri(uri_string):
    """Map a URI scheme (file / http(s) / ws(s)) onto the matching provider."""
    uri = urlparse(uri_string)
    if uri.scheme == 'file':
        return IPCProvider(uri.path)
    if uri.scheme in HTTP_SCHEMES:
        return HTTPProvider(uri_string)
    if uri.scheme in WS_SCHEMES:
        return WebsocketProvider(uri_string)
    raise NotImplementedError(
        'Web3 does not know how to connect to scheme %r in %r' % (
            uri.scheme,
            uri_string,
        )
    )
class AutoProvider(BaseProvider):
    """Provider that discovers a live node by probing candidate providers."""

    default_providers = (
        load_provider_from_environment,
        IPCProvider,
        HTTPProvider,
        WebsocketProvider,
    )
    # The most recently discovered working provider, if any.
    _active_provider = None

    def __init__(self, potential_providers=None):
        """
        :param iterable potential_providers: ordered series of provider classes to attempt with

        AutoProvider will initialize each potential provider (without arguments),
        in an attempt to find an active node. The list will default to
        :attribute:`default_providers`.
        """
        self._potential_providers = potential_providers or self.default_providers

    def make_request(self, method, params):
        try:
            return self._proxy_request(method, params)
        except IOError:
            # The cached provider may have died; rediscover and retry once.
            return self._proxy_request(method, params, use_cache=False)

    def isConnected(self):
        provider = self._get_active_provider(use_cache=True)
        return provider is not None and provider.isConnected()

    def _proxy_request(self, method, params, use_cache=True):
        provider = self._get_active_provider(use_cache)
        if provider is None:
            raise CannotHandleRequest(
                "Could not discover provider while making request: "
                "method:{0}\n"
                "params:{1}\n".format(
                    method,
                    params))
        return provider.make_request(method, params)

    def _get_active_provider(self, use_cache):
        if use_cache and self._active_provider is not None:
            return self._active_provider
        for Provider in self._potential_providers:
            provider = Provider()
            if provider is not None and provider.isConnected():
                self._active_provider = provider
                return provider
        return None
import logging
import os
from pathlib import (
Path,
)
import socket
import sys
import threading
from web3._utils.threads import (
Timeout,
)
from .base import (
JSONBaseProvider,
)
# Python < 3.5 has no json.JSONDecodeError; fall back to its base class.
try:
    from json import JSONDecodeError
except ImportError:
    JSONDecodeError = ValueError
def get_ipc_socket(ipc_path, timeout=0.1):
    """Open a connection to the node's IPC endpoint.

    On Windows this is a named pipe wrapped in a socket-like adapter; on
    POSIX it is a Unix-domain stream socket with a short receive timeout.
    """
    if sys.platform == 'win32':
        # On Windows named pipe is used. Simulate socket with it.
        from web3._utils.windows import NamedPipe
        return NamedPipe(ipc_path)
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(ipc_path)
    sock.settimeout(timeout)
    return sock
class PersistantSocket:
    """Reuses one IPC socket across requests, reopening it after errors."""
    sock = None

    def __init__(self, ipc_path):
        self.ipc_path = ipc_path

    def __enter__(self):
        if not self.ipc_path:
            raise FileNotFoundError("cannot connect to IPC socket at path: %r" % self.ipc_path)
        if not self.sock:
            self.sock = self._open()
        return self.sock

    def __exit__(self, exc_type, exc_value, traceback):
        # only close the socket if there was an error
        if exc_value is None:
            return
        try:
            self.sock.close()
        except Exception:
            pass
        self.sock = None

    def _open(self):
        return get_ipc_socket(self.ipc_path)

    def reset(self):
        """Force-close and reopen the socket, returning the fresh one."""
        self.sock.close()
        self.sock = self._open()
        return self.sock
def get_default_ipc_path():
    """Return the first existing well-known node IPC path for this platform.

    Checks geth, parity, and trinity locations in that order; returns None
    when none of them exists. Raises ValueError on unsupported platforms.
    """
    trinity_ipc = str(
        Path('~').expanduser() / '.local' / 'share' / 'trinity' / 'mainnet' / 'jsonrpc.ipc'
    )
    if sys.platform == 'darwin':
        candidates = [
            os.path.expanduser(os.path.join(
                "~", "Library", "Ethereum", "geth.ipc")),
            os.path.expanduser(os.path.join(
                "~", "Library", "Application Support", "io.parity.ethereum", "jsonrpc.ipc")),
            trinity_ipc,
        ]
    elif sys.platform.startswith('linux') or sys.platform.startswith('freebsd'):
        candidates = [
            os.path.expanduser(os.path.join(
                "~", ".ethereum", "geth.ipc")),
            os.path.expanduser(os.path.join(
                "~", ".local", "share", "io.parity.ethereum", "jsonrpc.ipc")),
            trinity_ipc,
        ]
    elif sys.platform == 'win32':
        candidates = [
            os.path.join("\\\\", ".", "pipe", "geth.ipc"),
            os.path.join("\\\\", ".", "pipe", "jsonrpc.ipc"),
        ]
    else:
        raise ValueError(
            "Unsupported platform '{0}'. Only darwin/linux/win32/freebsd are "
            "supported. You must specify the ipc_path".format(sys.platform)
        )
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return None
def get_dev_ipc_path():
    """Return the first existing dev-mode geth IPC path for this platform,
    or None when it does not exist. Raises ValueError on unsupported platforms."""
    if sys.platform == 'darwin':
        candidates = [
            os.path.expanduser(os.path.join(
                os.environ.get('TMPDIR', ''), "geth.ipc")),
        ]
    elif sys.platform.startswith('linux') or sys.platform.startswith('freebsd'):
        candidates = [
            os.path.expanduser(os.path.join("/tmp", "geth.ipc")),
        ]
    elif sys.platform == 'win32':
        candidates = [
            os.path.join("\\\\", ".", "pipe", "geth.ipc"),
            os.path.join("\\\\", ".", "pipe", "jsonrpc.ipc"),
        ]
    else:
        raise ValueError(
            "Unsupported platform '{0}'. Only darwin/linux/win32/freebsd are "
            "supported. You must specify the ipc_path".format(sys.platform)
        )
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return None
class IPCProvider(JSONBaseProvider):
    """Provider that speaks JSON-RPC over a local IPC socket / named pipe."""
    logger = logging.getLogger("web3.providers.IPCProvider")
    _socket = None

    def __init__(self, ipc_path=None, timeout=10, *args, **kwargs):
        if ipc_path is None:
            self.ipc_path = get_default_ipc_path()
        else:
            if isinstance(ipc_path, Path):
                ipc_path = str(ipc_path.resolve())
            self.ipc_path = ipc_path

        self.timeout = timeout
        # Serializes whole request/response cycles over the shared socket.
        self._lock = threading.Lock()
        self._socket = PersistantSocket(self.ipc_path)
        super().__init__(*args, **kwargs)

    def make_request(self, method, params):
        self.logger.debug("Making request IPC. Path: %s, Method: %s",
                          self.ipc_path, method)
        request = self.encode_rpc_request(method, params)

        with self._lock, self._socket as sock:
            try:
                sock.sendall(request)
            except BrokenPipeError:
                # one extra attempt, then give up
                sock = self._socket.reset()
                sock.sendall(request)

            raw_response = b""
            with Timeout(self.timeout) as timeout:
                while True:
                    try:
                        raw_response += sock.recv(4096)
                    except socket.timeout:
                        # Nothing available yet; yield briefly and retry.
                        timeout.sleep(0)
                        continue
                    if raw_response == b"":
                        timeout.sleep(0)
                    elif has_valid_json_rpc_ending(raw_response):
                        try:
                            response = self.decode_rpc_response(raw_response)
                        except JSONDecodeError:
                            # Looks complete but is not parseable yet; keep reading.
                            timeout.sleep(0)
                            continue
                        else:
                            return response
                    else:
                        # Partial message; keep accumulating.
                        timeout.sleep(0)
                        continue
# A valid JSON RPC response can only end in } or ] http://www.jsonrpc.org/specification
def has_valid_json_rpc_ending(raw_response):
    """Return True when *raw_response* plausibly ends a JSON-RPC payload."""
    stripped_raw_response = raw_response.rstrip()
    return stripped_raw_response.endswith((b"}", b"]"))
import operator
from eth_utils import (
is_dict,
is_hex,
is_string,
)
from web3._utils.formatters import (
apply_formatter_if,
apply_formatter_to_array,
apply_formatters_to_args,
apply_formatters_to_dict,
apply_key_map,
hex_to_integer,
integer_to_hex,
is_array_of_dicts,
remove_key_if,
static_return,
)
from web3._utils.toolz import (
assoc,
complement,
compose,
curry,
identity,
partial,
pipe,
)
from web3.middleware import (
construct_formatting_middleware,
)
def is_named_block(value):
    """True for the symbolic block identifiers accepted in place of a number."""
    return value in ("latest", "earliest", "pending")
def is_hexstr(value):
    """True for string values that parse as hexadecimal."""
    if not is_string(value):
        return False
    return is_hex(value)
# Convert 0x-hex strings to ints; anything else passes through untouched.
to_integer_if_hex = apply_formatter_if(is_hexstr, hex_to_integer)
is_not_named_block = complement(is_named_block)

# eth-tester returns snake_case keys while the JSON-RPC API uses camelCase.
# The *_KEY_MAPPINGS tables translate outgoing results; the *_PARAMS_* tables
# translate incoming request parameters the other way.
TRANSACTION_KEY_MAPPINGS = {
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
    'gas_price': 'gasPrice',
    'transaction_hash': 'transactionHash',
    'transaction_index': 'transactionIndex',
}
transaction_key_remapper = apply_key_map(TRANSACTION_KEY_MAPPINGS)

LOG_KEY_MAPPINGS = {
    'log_index': 'logIndex',
    'transaction_index': 'transactionIndex',
    'transaction_hash': 'transactionHash',
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
}
log_key_remapper = apply_key_map(LOG_KEY_MAPPINGS)

RECEIPT_KEY_MAPPINGS = {
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
    'contract_address': 'contractAddress',
    'gas_used': 'gasUsed',
    'cumulative_gas_used': 'cumulativeGasUsed',
    'transaction_hash': 'transactionHash',
    'transaction_index': 'transactionIndex',
}
receipt_key_remapper = apply_key_map(RECEIPT_KEY_MAPPINGS)

BLOCK_KEY_MAPPINGS = {
    'gas_limit': 'gasLimit',
    'sha3_uncles': 'sha3Uncles',
    'transactions_root': 'transactionsRoot',
    'parent_hash': 'parentHash',
    'bloom': 'logsBloom',
    'state_root': 'stateRoot',
    'receipt_root': 'receiptsRoot',
    'total_difficulty': 'totalDifficulty',
    'extra_data': 'extraData',
    'gas_used': 'gasUsed',
}
block_key_remapper = apply_key_map(BLOCK_KEY_MAPPINGS)

# Incoming transaction params: camelCase -> snake_case plus hex -> int.
TRANSACTION_PARAMS_MAPPING = {
    'gasPrice': 'gas_price',
}
transaction_params_remapper = apply_key_map(TRANSACTION_PARAMS_MAPPING)

TRANSACTION_PARAMS_FORMATTERS = {
    'gas': to_integer_if_hex,
    'gasPrice': to_integer_if_hex,
    'value': to_integer_if_hex,
    'nonce': to_integer_if_hex,
}
transaction_params_formatter = compose(
    # remove nonce for now due to issue https://github.com/ethereum/eth-tester/issues/80
    remove_key_if('nonce', lambda _: True),
    apply_formatters_to_dict(TRANSACTION_PARAMS_FORMATTERS),
)

# Incoming filter params: camelCase block bounds -> snake_case ints.
FILTER_PARAMS_MAPPINGS = {
    'fromBlock': 'from_block',
    'toBlock': 'to_block',
}
filter_params_remapper = apply_key_map(FILTER_PARAMS_MAPPINGS)

FILTER_PARAMS_FORMATTERS = {
    'fromBlock': to_integer_if_hex,
    'toBlock': to_integer_if_hex,
}
filter_params_formatter = apply_formatters_to_dict(FILTER_PARAMS_FORMATTERS)
filter_params_transformer = compose(filter_params_remapper, filter_params_formatter)

# Outgoing transactions: an empty 'to' means contract creation -> None.
TRANSACTION_FORMATTERS = {
    'to': apply_formatter_if(partial(operator.eq, ''), static_return(None)),
}
transaction_formatter = apply_formatters_to_dict(TRANSACTION_FORMATTERS)

RECEIPT_FORMATTERS = {
    'logs': apply_formatter_to_array(log_key_remapper),
}
receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS)

transaction_params_transformer = compose(transaction_params_remapper, transaction_params_formatter)
# Middleware translating JSON-RPC calls into eth-tester's native conventions
# (ints instead of hex strings, snake_case filter/transaction params) and
# translating results back into the JSON-RPC camelCase shapes.
ethereum_tester_middleware = construct_formatting_middleware(
    request_formatters={
        # Eth
        'eth_getBlockByNumber': apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        'eth_getFilterChanges': apply_formatters_to_args(hex_to_integer),
        'eth_getFilterLogs': apply_formatters_to_args(hex_to_integer),
        'eth_getBlockTransactionCountByNumber': apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        'eth_getUncleCountByBlockNumber': apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        'eth_getTransactionByBlockHashAndIndex': apply_formatters_to_args(
            identity,
            to_integer_if_hex,
        ),
        'eth_getTransactionByBlockNumberAndIndex': apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
            to_integer_if_hex,
        ),
        'eth_getUncleByBlockNumberAndIndex': apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
            to_integer_if_hex,
        ),
        'eth_newFilter': apply_formatters_to_args(
            filter_params_transformer,
        ),
        'eth_getLogs': apply_formatters_to_args(
            filter_params_transformer,
        ),
        'eth_sendTransaction': apply_formatters_to_args(
            transaction_params_transformer,
        ),
        'eth_estimateGas': apply_formatters_to_args(
            transaction_params_transformer,
        ),
        'eth_call': apply_formatters_to_args(
            transaction_params_transformer,
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        'eth_uninstallFilter': apply_formatters_to_args(hex_to_integer),
        'eth_getCode': apply_formatters_to_args(
            identity,
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        # EVM
        'evm_revert': apply_formatters_to_args(hex_to_integer),
        # Personal
        'personal_sendTransaction': apply_formatters_to_args(
            transaction_params_transformer,
            identity,
        ),
    },
    result_formatters={
        'eth_getBlockByHash': apply_formatter_if(
            is_dict,
            block_key_remapper,
        ),
        'eth_getBlockByNumber': apply_formatter_if(
            is_dict,
            block_key_remapper,
        ),
        'eth_getBlockTransactionCountByHash': apply_formatter_if(
            is_dict,
            transaction_key_remapper,
        ),
        'eth_getBlockTransactionCountByNumber': apply_formatter_if(
            is_dict,
            transaction_key_remapper,
        ),
        'eth_getTransactionByHash': apply_formatter_if(
            is_dict,
            compose(transaction_key_remapper, transaction_formatter),
        ),
        'eth_getTransactionReceipt': apply_formatter_if(
            is_dict,
            compose(receipt_key_remapper, receipt_formatter),
        ),
        # Filter ids travel as hex quantities on the wire.
        'eth_newFilter': integer_to_hex,
        'eth_newBlockFilter': integer_to_hex,
        'eth_newPendingTransactionFilter': integer_to_hex,
        'eth_getLogs': apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        'eth_getFilterChanges': apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        'eth_getFilterLogs': apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        # EVM
        'evm_snapshot': integer_to_hex,
    },
)
def guess_from(web3, transaction):
    """Pick a default ``from`` address: the coinbase, else the first account.

    Returns None when neither is available so callers can leave the field
    unset.
    """
    coinbase = web3.eth.coinbase
    if coinbase is not None:
        return coinbase
    try:
        return web3.eth.accounts[0]
    # BUG FIX: indexing an empty accounts list raises IndexError, which the
    # original `except KeyError` never caught; keep KeyError for dict-like
    # account containers.
    except (IndexError, KeyError):
        # no accounts available to pre-fill, carry on
        pass
    return None
def guess_gas(web3, transaction):
    """Estimate gas for *transaction* and double it as a safety margin."""
    estimate = web3.eth.estimateGas(transaction)
    return estimate * 2
@curry
def fill_default(field, guess_func, web3, transaction):
    """Return *transaction* with *field* filled via ``guess_func`` when unset."""
    already_set = field in transaction and transaction[field] is not None
    if already_set:
        return transaction
    guessed = guess_func(web3, transaction)
    return assoc(transaction, field, guessed)
def default_transaction_fields_middleware(make_request, web3):
    """Middleware that fills a missing 'from' (and, for eth_call, 'gas')
    field on outgoing transaction-like params."""
    fill_default_from = fill_default('from', guess_from, web3)
    fill_default_gas = fill_default('gas', guess_gas, web3)

    def middleware(method, params):
        # TODO send call to eth-tester without gas, and remove guess_gas entirely
        if method == 'eth_call':
            filled_transaction = pipe(
                params[0],
                fill_default_from,
                fill_default_gas,
            )
            return make_request(method, [filled_transaction] + params[1:])
        if method in ('eth_estimateGas', 'eth_sendTransaction'):
            filled_transaction = pipe(
                params[0],
                fill_default_from,
            )
            return make_request(method, [filled_transaction] + params[1:])
        return make_request(method, params)

    return middleware
import operator
import random
import sys
from eth_tester.exceptions import (
BlockNotFound,
FilterNotFound,
TransactionNotFound,
ValidationError,
)
from eth_utils import (
decode_hex,
encode_hex,
is_null,
keccak,
)
from web3._utils.formatters import (
apply_formatter_if,
)
from web3._utils.toolz import (
compose,
curry,
excepts,
)
def not_implemented(*args, **kwargs):
    """Stub handler for RPC endpoints the eth-tester backend cannot service."""
    message = "RPC method not implemented"
    raise NotImplementedError(message)
@curry
def call_eth_tester(fn_name, eth_tester, fn_args, fn_kwargs=None):
    """Invoke ``eth_tester.<fn_name>(*fn_args, **fn_kwargs)``."""
    kwargs = {} if fn_kwargs is None else fn_kwargs
    bound_method = getattr(eth_tester, fn_name)
    return bound_method(*fn_args, **kwargs)
def without_eth_tester(fn):
    """Adapt ``fn(params)`` to the ``(eth_tester, params)`` handler signature."""
    # workaround for: https://github.com/pytoolz/cytoolz/issues/103
    # @functools.wraps(fn)
    def adapter(_eth_tester, params):
        return fn(params)
    return adapter
def without_params(fn):
    """Adapt ``fn(eth_tester)`` to the ``(eth_tester, params)`` handler signature."""
    # workaround for: https://github.com/pytoolz/cytoolz/issues/103
    # @functools.wraps(fn)
    def adapter(eth_tester, _params):
        return fn(eth_tester)
    return adapter
@curry
def preprocess_params(eth_tester, params, preprocessor_fn):
    """Return ``(eth_tester, params)`` with *params* run through the preprocessor."""
    processed = preprocessor_fn(params)
    return eth_tester, processed
def static_return(value):
    """Build a callable that ignores all arguments and always returns *value*."""
    def constant(*_args, **_kwargs):
        return value
    return constant
def client_version(eth_tester, params):
    """Return a geth-style client version string for the tester backend."""
    # TODO: account for the backend that is in use.
    from eth_tester import __version__
    v = sys.version_info
    return f"EthereumTester/{__version__}/{sys.platform}/python{v.major}.{v.minor}.{v.micro}"
@curry
def null_if_excepts(exc_type, fn):
    """Wrap *fn* so that *exc_type* exceptions yield None instead of raising."""
    fallback = static_return(None)
    return excepts(exc_type, fn, fallback)
# Convenience wrappers: convert specific lookup failures into None results.
null_if_block_not_found = null_if_excepts(BlockNotFound)
null_if_transaction_not_found = null_if_excepts(TransactionNotFound)
null_if_filter_not_found = null_if_excepts(FilterNotFound)
null_if_indexerror = null_if_excepts(IndexError)
@null_if_indexerror
@null_if_block_not_found
def get_transaction_by_block_hash_and_index(eth_tester, params):
    """Return the transaction at ``(block_hash, index)``; None when the block
    is missing or the index is out of range (via the decorators)."""
    block_hash, transaction_index = params
    full_block = eth_tester.get_block_by_hash(block_hash, full_transactions=True)
    return full_block['transactions'][transaction_index]
@null_if_indexerror
@null_if_block_not_found
def get_transaction_by_block_number_and_index(eth_tester, params):
    """Return the transaction at ``(block_number, index)``; None when the block
    is missing or the index is out of range (via the decorators)."""
    block_number, transaction_index = params
    full_block = eth_tester.get_block_by_number(block_number, full_transactions=True)
    return full_block['transactions'][transaction_index]
def create_log_filter(eth_tester, params):
    """Create a log filter from the filter-params dict in ``params[0]``."""
    return eth_tester.create_log_filter(**params[0])
def get_logs(eth_tester, params):
    """Fetch logs matching the filter-params dict in ``params[0]``."""
    return eth_tester.get_logs(**params[0])
def _generate_random_private_key():
    """
    WARNING: This is not a secure way to generate private keys and should only
    be used for testing purposes.
    """
    key_bytes = bytes(bytearray(
        random.randint(0, 255) for _ in range(32)
    ))
    return encode_hex(key_bytes)
@without_params
def create_new_account(eth_tester):
    """Add a throwaway account (insecure random key; testing only)."""
    private_key = _generate_random_private_key()
    return eth_tester.add_account(private_key)
def personal_send_transaction(eth_tester, params):
    """Unlock the sender, send the transaction, and re-lock even on failure."""
    transaction, password = params
    sender = transaction['from']
    try:
        eth_tester.unlock_account(sender, password)
        transaction_hash = eth_tester.send_transaction(transaction)
    finally:
        eth_tester.lock_account(sender)
    return transaction_hash
# Dispatch table: JSON-RPC ``namespace`` -> ``endpoint`` -> handler.
# Handlers take ``(eth_tester, params)``; unsupported endpoints raise
# NotImplementedError via ``not_implemented``.
API_ENDPOINTS = {
    'web3': {
        'clientVersion': client_version,
        'sha3': compose(
            encode_hex,
            keccak,
            decode_hex,
            without_eth_tester(operator.itemgetter(0)),
        ),
    },
    'net': {
        # Single in-memory node: fixed network id, no peers, not listening.
        'version': static_return('1'),
        'peerCount': static_return(0),
        'listening': static_return(False),
    },
    'eth': {
        'protocolVersion': static_return('63'),
        'syncing': static_return(False),
        'coinbase': compose(
            operator.itemgetter(0),
            call_eth_tester('get_accounts'),
        ),
        'mining': static_return(False),
        'hashrate': static_return(0),
        'gasPrice': static_return(1),
        'accounts': call_eth_tester('get_accounts'),
        'blockNumber': compose(
            operator.itemgetter('number'),
            call_eth_tester('get_block_by_number', fn_kwargs={'block_number': 'latest'}),
        ),
        'getBalance': call_eth_tester('get_balance'),
        'getStorageAt': not_implemented,
        'getTransactionCount': call_eth_tester('get_nonce'),
        'getBlockTransactionCountByHash': null_if_block_not_found(compose(
            len,
            operator.itemgetter('transactions'),
            call_eth_tester('get_block_by_hash'),
        )),
        'getBlockTransactionCountByNumber': null_if_block_not_found(compose(
            len,
            operator.itemgetter('transactions'),
            call_eth_tester('get_block_by_number'),
        )),
        'getUncleCountByBlockHash': null_if_block_not_found(compose(
            len,
            operator.itemgetter('uncles'),
            call_eth_tester('get_block_by_hash'),
        )),
        'getUncleCountByBlockNumber': null_if_block_not_found(compose(
            len,
            operator.itemgetter('uncles'),
            call_eth_tester('get_block_by_number'),
        )),
        'getCode': call_eth_tester('get_code'),
        'sign': not_implemented,
        'sendTransaction': call_eth_tester('send_transaction'),
        'sendRawTransaction': call_eth_tester('send_raw_transaction'),
        'call': call_eth_tester('call'),  # TODO: untested
        'estimateGas': call_eth_tester('estimate_gas'),  # TODO: untested
        'getBlockByHash': null_if_block_not_found(call_eth_tester('get_block_by_hash')),
        'getBlockByNumber': null_if_block_not_found(call_eth_tester('get_block_by_number')),
        'getTransactionByHash': null_if_transaction_not_found(
            call_eth_tester('get_transaction_by_hash')
        ),
        'getTransactionByBlockHashAndIndex': get_transaction_by_block_hash_and_index,
        'getTransactionByBlockNumberAndIndex': get_transaction_by_block_number_and_index,
        # Pending (un-mined) transactions report a null receipt.
        'getTransactionReceipt': null_if_transaction_not_found(compose(
            apply_formatter_if(
                compose(is_null, operator.itemgetter('block_number')),
                static_return(None),
            ),
            call_eth_tester('get_transaction_receipt'),
        )),
        'getUncleByBlockHashAndIndex': not_implemented,
        'getUncleByBlockNumberAndIndex': not_implemented,
        'getCompilers': not_implemented,
        'compileLLL': not_implemented,
        'compileSolidity': not_implemented,
        'compileSerpent': not_implemented,
        'newFilter': create_log_filter,
        'newBlockFilter': call_eth_tester('create_block_filter'),
        'newPendingTransactionFilter': call_eth_tester('create_pending_transaction_filter'),
        # delete_filter returns None on success; is_null maps that to True.
        'uninstallFilter': excepts(
            FilterNotFound,
            compose(
                is_null,
                call_eth_tester('delete_filter'),
            ),
            static_return(False),
        ),
        'getFilterChanges': null_if_filter_not_found(call_eth_tester('get_only_filter_changes')),
        'getFilterLogs': null_if_filter_not_found(call_eth_tester('get_all_filter_logs')),
        'getLogs': get_logs,
        'getWork': not_implemented,
        'submitWork': not_implemented,
        'submitHashrate': not_implemented,
    },
    'db': {
        'putString': not_implemented,
        'getString': not_implemented,
        'putHex': not_implemented,
        'getHex': not_implemented,
    },
    'shh': {
        'post': not_implemented,
        'version': not_implemented,
        'newIdentity': not_implemented,
        'hasIdentity': not_implemented,
        'newGroup': not_implemented,
        'addToGroup': not_implemented,
        'newFilter': not_implemented,
        'uninstallFilter': not_implemented,
        'getFilterChanges': not_implemented,
        'getMessages': not_implemented,
    },
    'admin': {
        'addPeer': not_implemented,
        'datadir': not_implemented,
        'nodeInfo': not_implemented,
        'peers': not_implemented,
        'setSolc': not_implemented,
        'startRPC': not_implemented,
        'startWS': not_implemented,
        'stopRPC': not_implemented,
        'stopWS': not_implemented,
    },
    'debug': {
        'backtraceAt': not_implemented,
        'blockProfile': not_implemented,
        'cpuProfile': not_implemented,
        'dumpBlock': not_implemented,
        'gtStats': not_implemented,
        'getBlockRLP': not_implemented,
        'goTrace': not_implemented,
        'memStats': not_implemented,
        'seedHashSign': not_implemented,
        'setBlockProfileRate': not_implemented,
        'setHead': not_implemented,
        'stacks': not_implemented,
        'startCPUProfile': not_implemented,
        'startGoTrace': not_implemented,
        'stopCPUProfile': not_implemented,
        'stopGoTrace': not_implemented,
        'traceBlock': not_implemented,
        'traceBlockByNumber': not_implemented,
        'traceBlockByHash': not_implemented,
        'traceBlockFromFile': not_implemented,
        'traceTransaction': not_implemented,
        'verbosity': not_implemented,
        'vmodule': not_implemented,
        'writeBlockProfile': not_implemented,
        'writeMemProfile': not_implemented,
    },
    'miner': {
        'makeDAG': not_implemented,
        'setExtra': not_implemented,
        'setGasPrice': not_implemented,
        'start': not_implemented,
        'startAutoDAG': not_implemented,
        'stop': not_implemented,
        'stopAutoDAG': not_implemented,
    },
    'personal': {
        'ecRecover': not_implemented,
        'importRawKey': call_eth_tester('add_account'),
        'listAccounts': call_eth_tester('get_accounts'),
        'lockAccount': excepts(
            ValidationError,
            compose(static_return(True), call_eth_tester('lock_account')),
            static_return(False),
        ),
        'newAccount': create_new_account,
        'unlockAccount': excepts(
            ValidationError,
            compose(static_return(True), call_eth_tester('unlock_account')),
            static_return(False),
        ),
        'sendTransaction': personal_send_transaction,
        'sign': not_implemented,
    },
    'testing': {
        'timeTravel': call_eth_tester('time_travel'),
    },
    'txpool': {
        'content': not_implemented,
        'inspect': not_implemented,
        'status': not_implemented,
    },
    'evm': {
        'mine': call_eth_tester('mine_blocks'),
        'revert': call_eth_tester('revert_to_snapshot'),
        'snapshot': call_eth_tester('take_snapshot'),
    },
}
from web3.providers import (
BaseProvider,
)
from .middleware import (
default_transaction_fields_middleware,
ethereum_tester_middleware,
)
class AsyncEthereumTesterProvider(BaseProvider):
    """Placeholder async provider.

    Wraps a synchronous EthereumTesterProvider so the async API code paths
    have an awaitable ``make_request`` to call during tests.
    """

    def __init__(self):
        self.eth_tester = EthereumTesterProvider()

    async def make_request(self, method, params):
        # Delegates synchronously; the coroutine wrapper makes it awaitable.
        provider = self.eth_tester
        return provider.make_request(method, params)
class EthereumTesterProvider(BaseProvider):
    """Provider that services JSON-RPC requests from an in-memory eth-tester."""

    middlewares = [
        default_transaction_fields_middleware,
        ethereum_tester_middleware,
    ]
    ethereum_tester = None  # eth_tester.EthereumTester backend instance
    api_endpoints = None    # {namespace: {endpoint: handler}} dispatch table

    def __init__(self, ethereum_tester=None, api_endpoints=None):
        # do not import eth_tester until runtime, it is not a default dependency
        from eth_tester import EthereumTester
        from eth_tester.backends.base import BaseChainBackend
        # Accept a ready EthereumTester, a bare backend, or nothing (default).
        if ethereum_tester is None:
            self.ethereum_tester = EthereumTester()
        elif isinstance(ethereum_tester, EthereumTester):
            self.ethereum_tester = ethereum_tester
        elif isinstance(ethereum_tester, BaseChainBackend):
            self.ethereum_tester = EthereumTester(ethereum_tester)
        else:
            raise TypeError(
                "Expected ethereum_tester to be of type `eth_tester.EthereumTester` or "
                "a subclass of `eth_tester.backends.base.BaseChainBackend`, "
                f"instead received {type(ethereum_tester)}. "
                "If you would like a custom eth-tester instance to test with, see the "
                "eth-tester documentation. https://github.com/ethereum/eth-tester."
            )
        if api_endpoints is None:
            # do not import eth_tester derivatives until runtime, it is not a default dependency
            from .defaults import API_ENDPOINTS
            self.api_endpoints = API_ENDPOINTS
        else:
            self.api_endpoints = api_endpoints

    def make_request(self, method, params):
        """Split ``namespace_endpoint`` and dispatch to the handler table."""
        namespace, _, endpoint = method.partition('_')
        try:
            delegator = self.api_endpoints[namespace][endpoint]
        except KeyError:
            return {
                "error": "Unknown RPC Endpoint: {0}".format(method),
            }
        try:
            response = delegator(self.ethereum_tester, params)
        except NotImplementedError:
            return {
                "error": "RPC Endpoint has not been implemented: {0}".format(method),
            }
        else:
            return {
                'result': response,
            }

    def isConnected(self):
        # The in-process backend is always reachable.
        return True
### Здесь есть функции:
- `upload_file_url(url, expires, secret)`: Загрузка файла через ссылку, url=ссылка, expires=время хранения файла в часах(можно оставить пустым), secret=удлинняет ссылку(можно оставить пустым).
- `upload_file_path(path, expires, secret)`: Тоже самое что и upload_file_url, только нужно указывать путь к файлу.
- `delete_file(token, url)`: Удаляет файл, token=токен, url=ссылка на файл в 0x0.
- `change_expires(url, expires, token)`: Изменяет время хранения файла, token=токен, url=ссылка на файл в 0x0, expires=новое время хранения файла в часах. | 0x0-python | /0x0-python-0.5.tar.gz/0x0-python-0.5/README.md | README.md
# Aliyun DNS plugin for autocert project
This plugin provides an automated `perform_dns01()` for
[autocert](https://github.com/Smart-Hypercube/autocert/tree/master/letsencrypt#apply-for-some-certificates).
```python3
# other kinds of credential, e.g. StsTokenCredential, can be used as well
credential = AccessKeyCredential(ACCESS_KEY_ID, ACCESS_KEY_SECRET)
with AliyunDNS(credential) as aliyun:
result = le.order(k2, domains, aliyun.perform_dns01)
# added DNS records will be removed automatically
```
| 0x01-autocert-dns-aliyun | /0x01-autocert-dns-aliyun-0.1.tar.gz/0x01-autocert-dns-aliyun-0.1/README.md | README.md |
__all__ = ['Change', 'Cubic', 'Hash', 'Item', 'RemovedItem', 'Version', 'VersionData']
from dataclasses import dataclass
from itertools import islice
from hashlib import sha3_256
import struct
from typing import Dict, Iterable, List, Mapping, Optional, Set, Tuple, Union
import msgpack
import requests
# Type alias: block hashes and version checksums travel as raw bytes.
Hash = bytes
@dataclass(frozen=True)
class Item:
    """One stored entry: opaque metadata plus its ordered content-block hashes."""
    meta: bytes
    blocks: List[Hash]  # hashes of the content blocks, in order
@dataclass(frozen=True)
class RemovedItem:
    """Tombstone marking a deleted item in a diff; always falsy."""
    def __bool__(self):
        return False

# A change is either an (added/updated) Item or a RemovedItem tombstone.
Change = Union[Item, RemovedItem]
@dataclass(frozen=True)
class Version:
    """Lightweight version reference: checksum plus creation timestamp."""
    checksum: bytes
    timestamp: int      # seconds part of the creation time
    timestamp_ns: int   # nanoseconds part of the creation time
@dataclass(frozen=True)
class VersionData:
    """Full version contents: the Version header fields plus its item map."""
    checksum: bytes
    timestamp: int
    timestamp_ns: int
    items: Dict[bytes, Item]  # path -> Item
def _validate(version: VersionData) -> None:
    """Recompute *version*'s checksum and raise ValueError on mismatch.

    The digest covers the header (timestamp, nanoseconds, item count) and
    every item in sorted path order: the three lengths, then the path, the
    metadata, and each block hash.
    """
    digest = sha3_256(struct.pack('>qII', version.timestamp, version.timestamp_ns, len(version.items)))
    for path in sorted(version.items):
        entry = version.items[path]
        digest.update(struct.pack('>III', len(path), len(entry.meta), len(entry.blocks)))
        digest.update(path)
        digest.update(entry.meta)
        for block_hash in entry.blocks:
            digest.update(block_hash)
    if digest.digest() != version.checksum:
        raise ValueError('VersionData validation failed')
# Default Cubic API endpoint.
ENDPOINT = 'https://api.cubic.0x01.me'
class Cubic:
    """Client for the Cubic block/version HTTP API (msgpack over POST)."""

    class Error(Exception):
        # Raised with the failed requests.Response attached as the argument.
        pass

    def __init__(self, tree, token, endpoint=ENDPOINT, session=None, timeout=60):
        self._tree = tree
        self._endpoint = endpoint
        self._session = session or requests.Session()
        # HTTP basic auth: tree id as user, token as password.
        self._session.auth = tree, token
        self._timeout = timeout
        # Server-advertised request-size limits, fetched once up front.
        self._limits = self._call('/v3/limits')

    def _call(self, api, payload=None):
        # POST a msgpack-encoded payload and decode the msgpack response.
        r = self._session.post(self._endpoint + api, data=msgpack.packb(payload), timeout=self._timeout)
        if not r.ok:
            raise self.Error(r)
        return msgpack.unpackb(r.content)

    def dedup_blocks(self, hashes: Iterable[Hash]) -> Set[Hash]:
        """Filter blocks that need to be uploaded."""
        hashes = list(hashes)
        result = set()
        # Query in server-limited batches.
        limit = self._limits['dedupBlocks/count']
        for i in range(0, len(hashes), limit):
            result.update(self._call('/v3/dedupBlocks', hashes[i:i+limit]))
        return result

    def put_blocks(self, blocks: Iterable[bytes]) -> None:
        """Upload all blocks.
        You may want to use dedup_put_blocks instead.
        """
        buffer = []
        size = 0
        # Flush whenever adding the next block would exceed either limit.
        limit_count = self._limits['putBlocks/count']
        limit_size = self._limits['putBlocks/size']
        for i in blocks:
            if len(buffer) + 1 > limit_count or size + len(i) > limit_size:
                if buffer:
                    self._call('/v3/putBlocks', buffer)
                buffer = []
                size = 0
            buffer.append(i)
            size += len(i)
        # NOTE(review): this final flush is unguarded, so an empty input posts
        # an empty putBlocks request — confirm the server tolerates that.
        self._call('/v3/putBlocks', buffer)

    def dedup_put_blocks(self, blocks: Mapping[Hash, bytes]) -> None:
        """Only upload necessary blocks."""
        self.put_blocks(blocks[i] for i in self.dedup_blocks(blocks))

    def put_block(self, block: bytes) -> None:
        """Upload one block."""
        self._call('/v3/putBlocks', [block])

    def get_blocks(self, hashes: Iterable[Hash]) -> Dict[Hash, bytes]:
        """Download all blocks."""
        hashes = set(hashes)
        result = {}
        limit = self._limits['getBlocks/count']
        # The server may return a subset; keep requesting until all arrive.
        while hashes:
            buffer = list(islice(hashes, limit))
            for k, v in self._call('/v3/getBlocks', buffer).items():
                hashes.discard(k)
                result[k] = v
        return result

    def get_block(self, hash: Hash) -> bytes:
        """Download one block."""
        return self._call('/v3/getBlocks', [hash])[hash]

    def list_versions(self) -> List[Version]:
        """List all versions (most recent version last)."""
        return [Version(*i) for i in self._call('/v3/listVersions')]

    def diff_versions(self, from_: Union[None, Version, VersionData], to: Version) -> Dict[bytes, Change]:
        """Get changes between two versions."""
        def f(x: Optional[Iterable]) -> Change:
            # Wire format: a (meta, blocks) pair for an item, falsy for removal.
            return Item(*x) if x else RemovedItem()
        payload = (from_.checksum if from_ else None), to.checksum
        return {k: f(v) for k, v in self._call('/v3/diffVersions', payload).items()}

    def get_version(self, version: Version, base: Optional[VersionData] = None) -> VersionData:
        """Get items of a version.
        If base is provided, only download changes between two versions."""
        items = base.items.copy() if base else {}
        for k, v in self.diff_versions(base, version).items():
            if v:
                items[k] = v
            else:
                del items[k]
        result = VersionData(version.checksum, version.timestamp, version.timestamp_ns, items)
        # Verify the reassembled item set against the server checksum.
        _validate(result)
        return result

    def update_version(self, changes: Mapping[bytes, Change], base: Optional[VersionData] = None) -> VersionData:
        """Create a new version using an old version and changes."""
        def f(x: Change) -> Optional[Tuple]:
            # Inverse of diff_versions' decoding: tombstones become None.
            return (x.meta, x.blocks) if x else None
        payload = (base.checksum if base else None), {k: f(v) for k, v in changes.items()}
        result = self._call('/v3/updateVersion', payload)
        _validate(result)
        return result
__all__ = ['LetsEncrypt', 'LetsEncryptStaging']
import acme.challenges
import acme.client
import acme.crypto_util
import acme.errors
import acme.messages
import josepy
import OpenSSL
class LetsEncrypt:
    """Thin ACME v2 client for Let's Encrypt built on the ``acme`` library."""

    DIRECTORY_URL = 'https://acme-v02.api.letsencrypt.org/directory'

    def __init__(self, key: str, uri=None, *, phone=None, email=None):
        """Load the PEM account *key*; register a new account when *uri* is None,
        otherwise reuse the existing account at *uri*."""
        self.uri = uri
        if uri is None:
            self.account = None
        else:
            self.account = acme.messages.RegistrationResource(body={}, uri=uri)
        # noinspection PyTypeChecker
        self.key = josepy.JWK.load(key.encode('ascii'))
        self.session = acme.client.ClientNetwork(self.key, self.account)
        directory_json = self.session.get(self.DIRECTORY_URL).json()
        directory = acme.messages.Directory.from_json(directory_json)
        self.acme = acme.client.ClientV2(directory, self.session)
        if self.account is None:
            # First run: register and remember the new account URI.
            message = acme.messages.NewRegistration.from_data(
                phone=phone,
                email=email,
                terms_of_service_agreed=True,
            )
            self.account = self.acme.new_account(message)
            self.uri = self.account.uri

    def order(self, key: str, domains, perform_dns01):
        """Order a certificate for *domains* signed with PEM *key*.

        ``perform_dns01(domain, validation)`` must publish the TXT record
        before this method answers each challenge.  Returns the full-chain
        PEM certificate.
        """
        def select_dns01(challenges):
            # This client only supports the DNS-01 challenge type.
            for i in challenges:
                if isinstance(i.chall, acme.challenges.DNS01):
                    return i
            raise ValueError('DNS-01 not offered')
        csr = acme.crypto_util.make_csr(key, domains)
        order = self.acme.new_order(csr)
        for auth in order.authorizations:
            challenge = select_dns01(auth.body.challenges)
            response, validation = challenge.response_and_validation(self.key)
            name = auth.body.identifier.value
            domain = challenge.validation_domain_name(name)
            perform_dns01(domain, validation)
            self.acme.answer_challenge(challenge, response)
        return self.acme.poll_and_finalize(order).fullchain_pem

    def revoke(self, fullchain: str):
        """Revoke the PEM certificate in *fullchain* (reason code 0).

        An already-revoked certificate (ConflictError) is treated as success.
        """
        # noinspection PyTypeChecker
        certificate = OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_PEM, fullchain)
        certificate = josepy.ComparableX509(certificate)
        try:
            return self.acme.revoke(certificate, 0)
        except acme.errors.ConflictError:
            pass
class LetsEncryptStaging(LetsEncrypt):
    """Same client pointed at the Let's Encrypt staging environment."""
    DIRECTORY_URL = 'https://acme-staging-v02.api.letsencrypt.org/directory'
# Let's Encrypt library for human beings
Note: The example below used the Let's Encrypt
[staging environment](https://letsencrypt.org/docs/staging-environment/).
Replace `letsencrypt.LetsEncryptStaging` with `letsencrypt.LetsEncrypt`
for production.
## Create account key
```bash
openssl genrsa -out account.key 4096
```
**WARNING: Keep the key safe!**
## Register on Let's Encrypt
```python3
with open('account.key') as f:
account_key = f.read()
# phone, email, or both can be omitted
le = letsencrypt.LetsEncryptStaging(account_key, phone='...', email='...')
uri = le.uri
print('Please save your account ID:')
print(uri)
```
## After you have an account
```python3
le = letsencrypt.LetsEncryptStaging(account_key, uri)
```
## Apply for some certificates!
```bash
openssl genrsa -out example.com.key 4096
```
**WARNING: Keep the key safe!**
```python3
# just an example, please use an automated function instead
def perform_dns01(domain, validation):
print('Please add a TXT record:')
print('domain:', domain)
print('value:', validation)
input('Press Enter after finished...')
with open('example.com.key') as f:
cert_key = f.read()
with open('1.crt', 'w') as f:
f.write(le.order(cert_key, ['example.com'], perform_dns01))
with open('2.crt', 'w') as f:
f.write(le.order(cert_key, ['example.com', 'a.example.com'], perform_dns01))
with open('3.crt', 'w') as f:
f.write(le.order(cert_key, ['a.b.example.com', '*.c.example.com'], perform_dns01))
```
| 0x01-letsencrypt | /0x01-letsencrypt-0.1.tar.gz/0x01-letsencrypt-0.1/README.md | README.md |
``0x10c-asm`` assembly compiler for Notch's DCPU-16
---------------------------------------------------
Install from PyPI:
==================
pip install 0x10c-asm
Usage:
======
$ 0x10c-asm.py -h
usage: ``0x10c-asm.py [-h] IN [OUT]``
A simple Python-based DCPU assembly compiler
positional arguments:
``IN`` file path of the file containing the assembly code
``OUT`` file path where to store the binary code
optional arguments:
-h, --help show this help message and exit
| 0x10c-asm | /0x10c-asm-0.0.2.tar.gz/0x10c-asm-0.0.2/README.rst | README.rst |
import re
import argparse
import sys
import struct
# DCPU-16 instruction-set tables and the line-matching regular expression.
opcodes = [
    'SET', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD', 'SHL', 'SHR', 'AND', 'BOR',
    'XOR', 'IFE', 'IFN', 'IFG', 'IFB',
]
nonbasic_opcodes = [
    'JSR'
]
pointers = [
    'A', 'B', 'C', 'X', 'Y', 'Z', 'I', 'J',
    'POP', 'PEEK', 'PUSH', 'SP', 'PC', 'O',
]
oc = '|'.join(opcodes)  # (SET|ADD|SUB|...)
noc = '|'.join(nonbasic_opcodes)
deref_pattern = r'\[\s*%s\s*\]'  # [ ? ]
# BUG FIX: hexadecimal digits run 0-9a-f (upper case accepted as well); the
# original class [0-9a-d] rejected literals such as 0x1e, 0x1f or 0xff,
# silently lexing them as labels instead.
hexa = '0x[0-9a-fA-F]{1,4}'  # e.g. 0x1f
hexa_deref = deref_pattern % hexa  # [ 0x1f ]
reg_pointers = '|'.join(pointers)  # A|B|C
reg_deref = '|'.join(deref_pattern % reg for reg in pointers[:8])  # [A]|[B]
hexa_plus_reg = r'(%s)\s*\+\s*(%s)' % (hexa, '|'.join(pointers[:8]))  # 0xb1 + I
offset = deref_pattern % hexa_plus_reg  # [ 0xb1 + I ]
label = r'\w+'
dec = r'\d+'
# One operand: each alternative is its own capture group, consumed
# positionally by emit_from_op.
op = '|'.join(
    '(%s)' % x for x in
    [hexa, hexa_deref, reg_pointers, reg_deref, offset, dec, label]
)
l_def = r':\w+'
# Full line: optional label, optional (basic op + 2 operands | non-basic op
# + 1 operand), optional ';' comment.
row_pattern = r'^\s*(%s)?\s*(((%s)\s+(%s)\s*,\s*(%s))|((%s)\s+(%s)))?\s*(;.*)?$'
re_row = re.compile(row_pattern % (l_def, oc, op, op, noc, op))
def emit_from_str(code):
    """Tokenize DCPU-16 assembly source, yielding (TOKEN_TYPE, value) pairs."""
    for line in code.split('\n'):
        match = re_row.match(line)
        if match is None:
            # Unparseable line: report it and abort compilation.
            print('error found on line: %s' % line)
            exit(1)
        for token in emit_from_line(match.groups()):
            yield token
def emit_from_line(line):
    """Yield tokens for one matched line: label, opcode(s)+operands, comment."""
    label_def, basic_op, nb_alt, comment = line[0], line[3], line[24], line[36]
    if label_def:
        yield ('LABEL_DEF', label_def[1:])
    if basic_op:
        # Basic opcode with two operand group-slices of ten groups each.
        yield ('OPCODE', basic_op)
        for token in emit_from_op(line[4:14]):
            yield token
        for token in emit_from_op(line[14:24]):
            yield token
    if nb_alt:
        # Non-basic opcode (e.g. JSR) with a single operand.
        yield ('OPCODE_NB', line[25])
        for token in emit_from_op(line[26:36]):
            yield token
    if comment:
        yield ('COMMENT', comment[1:])
def emit_from_op(op):
    """Translate one ten-group operand slice into a single lexer token."""
    hexa_lit, hexa_deref, reg, reg_deref = op[1], op[2], op[3], op[4]
    offset_all, offset_num, offset_reg = op[5], op[6], op[7]
    dec_lit, label_name = op[8], op[9]
    if hexa_lit:
        yield ('CONST', int(hexa_lit, 0))
    elif hexa_deref:
        # Strip the surrounding brackets before parsing the literal.
        yield ('CONST_DEREF', int(hexa_deref[1:-1], 0))
    elif reg:
        yield ('REGISTRY', reg)
    elif reg_deref:
        yield ('REGISTRY_DEREF', reg_deref[1:-1])
    elif offset_all:
        yield ('OFFSET', (int(offset_num, 0), offset_reg))
    elif dec_lit:
        yield ('CONST', int(dec_lit))
    elif label_name:
        yield ('LABEL_USE', label_name)
def compile(source):
    """Assemble DCPU source text into a list of 16-bit machine words.

    Note: intentionally shadows the ``compile`` builtin; the name is kept
    for backward compatibility with existing callers.
    """
    result = []
    emitter = emit_from_str(source)
    labels = {}             # label name -> word address
    labels_to_update = {}   # label name -> [result indices awaiting address]
    to_append = []          # extra words emitted after the instruction word

    def get_i(o_ttype, o_token):
        """Return the 6-bit operand value, queueing any extra word."""
        if o_ttype == 'CONST':
            i = o_token + 0x20          # short literal 0x00-0x1f -> 0x20-0x3f
            if o_token > 0x1f:
                i = 0x1f                # too large: "next word" literal
                to_append.append(o_token)
        elif o_ttype == 'CONST_DEREF':
            i = 0x1e                    # [next word]
            to_append.append(o_token)
        elif o_ttype == 'REGISTRY':
            i = pointers.index(o_token)
            if i >= 8:
                i += 0x10               # POP..O occupy values 0x18-0x1d
        elif o_ttype == 'REGISTRY_DEREF':
            i = pointers.index(o_token) + 0x08
        elif o_ttype == 'OFFSET':
            offset, reg = o_token
            i = pointers.index(reg) + 0x10  # [next word + register]
            to_append.append(offset)
        elif o_ttype == 'LABEL_USE':
            i = 0x1f                    # label address goes in the next word
            addr = labels.get(o_token)
            if addr is None:
                # Bug fix: account for words already queued by a previous
                # operand of this same instruction. The placeholder lands
                # at len(result) + 1 + len(to_append), not len(result) + 1
                # (the old formula corrupted e.g. "SET [0x1000], label").
                pos = len(result) + 1 + len(to_append)
                labels_to_update.setdefault(o_token, []).append(pos)
            to_append.append(addr)
        return i

    for ttype, token in emitter:
        to_append[:] = []
        if ttype == 'LABEL_DEF':
            addr = labels[token] = len(result)
            # Back-patch earlier forward references to this label.
            for pos in labels_to_update.get(token, []):
                result[pos] = addr
        elif ttype == 'OPCODE':
            current_word = opcodes.index(token) + 1
            shift = 0
            # next(emitter) works on Python 2.6+ and 3 (was emitter.next()).
            for o_ttype, o_token in [next(emitter), next(emitter)]:
                i = get_i(o_ttype, o_token)
                # Operand a in bits 4-9, operand b in bits 10-15.
                current_word += i << (4 + 6 * shift)
                shift += 1
            result.append(current_word)
            result.extend(to_append)
        elif ttype == 'OPCODE_NB':
            index = nonbasic_opcodes.index(token) + 1
            current_word = index << 4
            o_ttype, o_token = next(emitter)
            i = get_i(o_ttype, o_token)
            current_word += i << 10
            result.append(current_word)
            result.extend(to_append)
    return result
def pprint(words):
    """Print words as rows of eight 4-digit hex values, prefixed by a
    hex word offset (e.g. "0008: ...")."""
    f = '%0.4x'
    wrds = words
    if len(words) % 8:
        # Pad the final row with zero words so every row has 8 columns.
        wrds = words + [0] * (8 - len(words) % 8)
    for x in range(0, len(wrds), 8):
        # Single-argument print() is valid on both Python 2 and 3 (the
        # original two-argument print statement was Python 2 only); the
        # output string is identical.
        print(f % x + ': ' + ' '.join(f % w for w in wrds[x:x + 8]))
def main():
    """Command-line entry point: assemble IN and print or write the result."""
    parser = argparse.ArgumentParser(
        description='A simple Python-based DCPU assembly compiler'
    )
    parser.add_argument(
        'source', metavar='IN', type=str, nargs=1,
        help='file path of the file containing the assembly code'
    )
    parser.add_argument(
        'destination', metavar='OUT', type=str, nargs='?',
        help='file path where to store the binary code'
    )
    args = parser.parse_args()
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(args.source[0]) as source_file:
        words = compile(source_file.read())
    if args.destination is None:
        return pprint(words)
    # '>H' packs each word big-endian regardless of the host byte order,
    # so the original "assert sys.byteorder == 'little'" was an
    # unnecessary restriction and has been removed.
    packed = [struct.pack('>H', word) for word in words]
    with open(args.destination, 'wb') as out:
        # b''.join keeps this working on Python 3, where struct.pack
        # returns bytes (''.join over bytes raises TypeError there).
        out.write(b''.join(packed))
if __name__ == '__main__':
main() | 0x10c-asm | /0x10c-asm-0.0.2.tar.gz/0x10c-asm-0.0.2/0x10c-asm.py | 0x10c-asm.py |
### Contents
- [What is this?](#what-is-this)
- [How do I install it?](#how-do-i-install-it)
- [How do I use it?](#how-do-i-use-it)
- [Extensions](#extensions)
- [Reference](#reference)
### What is this?<a id="what-is-this"></a>
The goal of this module is to help write code that generates code.
Focus is placed on enabling the user to easily describe,
build and reason about code structures rapidly.
### How do I install it?<a id="how-do-i-install-it"></a>
#### From PyPI:
`pip install 0xf0f-codenode`
#### From GitHub:
`pip install git+https://github.com/0xf0f/codenode`
### How do I use it?<a id="how-do-i-use-it"></a>
Like the `json` and `pickle` modules, this module provides
`dump` and `dumps` functions for generating output.
Code can be built using any tree of iterables containing strings,
indentation nodes and newline nodes.
For example, the built-in `line` function returns a tuple:
```python
from codenode import indentation, newline
def line(content):
return indentation, content, newline
```
Which we can combine with `indent` and `dedent` nodes:
```python
from codenode import line, indent, dedent, dumps
def counting_function(count_from, count_to):
return [
line(f'def count_from_{count_from}_to_{count_to}():'),
indent,
[
line(f'print({i})')
for i in range(count_from, count_to)
],
dedent,
]
print(dumps(counting_function(0, 5)))
```
Which outputs:
```
def count_from_0_to_5():
print(0)
print(1)
print(2)
print(3)
print(4)
```
But what if we want to count to a really big number, like
1,000,000,000,000,000?
It would be inefficient to store all those lines in memory
at once. We can use a generator to break them down into
individual parts instead:
```python
from codenode import indent, dedent, newline, indentation, dump
def counting_function_generator(count_from, count_to):
yield indentation
yield 'def count_from_', str(count_from), '_to_', str(count_to), '():'
yield newline
yield indent
for i in range(count_from, count_to):
yield indentation, 'print(', str(i), ')', newline
yield dedent
with open('code.py', 'w') as file:
dump(counting_function_generator(0, 1_000_000_000_000_000), file)
```
We can also build a class with an `__iter__` method:
```python
from codenode import line, indent, dedent, dump
class CountingFunction:
def __init__(self, count_from, count_to):
self.count_from = count_from
self.count_to = count_to
def __iter__(self):
yield line(
f'def count_from_{self.count_from}_to_{self.count_to}():'
)
yield indent
for i in range(self.count_from, self.count_to):
yield line(f'print({i})')
yield dedent
with open('code.py', 'w') as file:
dump(CountingFunction(0, 1_000_000), file)
```
Or a more generalized function class:
```python
class Function:
def __init__(self, name, *args):
self.name = name
self.args = args
self.children = []
def __iter__(self):
arg_string = ', '.join(self.args)
yield line(f'def {self.name}({arg_string}):')
yield indent
yield self.children
yield dedent
class CountingFunction(Function):
def __init__(self, count_from, count_to):
super().__init__(f'count_from_{count_from}_to_{count_to}')
for i in range(count_from, count_to):
self.children.append(line(f'print({i})'))
```
Leveraging python's iteration protocol like this allows:
- Mixing and matching whatever fits the use case to maximize tradeoffs,
such as using generators for their memory efficiency,
custom iterable classes for their semantics, or plain old lists and
tuples for their simplicity.
- Taking advantage of existing modules that offer tooling for
iterables, such as itertools.
- Building higher level structures from as many iterable building blocks
as desired.
### Extensions
Module behaviour can be extended by overriding methods of the
`codenode.writer.Writer` and `codenode.writer.WriterStack` classes. An
example of this can be seen in the `codenode.debug.debug_patch`
function. The variable `codenode.default_writer_type` can be used to
replace the `Writer` type used in `dump` and `dumps` with a custom one.
Some modules with helper classes and functions are also provided:
- [codenode_utilities](codenode_utilities/README.md)
- contains general language agnostic helper functions and classes
[comment]: <> ( - codenode_python)
[comment]: <> ( - contains helper classes and functions for generating python code)
[comment]: <> ( - codenode_cpp)
[comment]: <> ( - contains helper classes and functions for generating c++ code)
[comment]: <> ( - codenode_legacy)
[comment]: <> ( - helpers for code that relies on the old codenode API (below version 1.0))
[comment]: <> ( - uses a previous, entirely different approach to nodes)
### Reference<a id="reference"></a>
> **Note**
> This section of the readme was generated using codenode itself.
>
> See docs/generate_readme.py
#### Contents
- [codenode.dump](#codenodedump)
- [codenode.dumps](#codenodedumps)
- [codenode.line](#codenodeline)
- [codenode.indent](#codenodeindent)
- [codenode.dedent](#codenodededent)
- [codenode.newline](#codenodenewline)
- [codenode.indentation](#codenodeindentation)
- [codenode.lines](#codenodelines)
- [codenode.empty_lines](#codenodeempty_lines)
- [codenode.indented](#codenodeindented)
- [codenode.default_writer_type](#codenodedefault_writer_type)
- [codenode.writer.Writer](#codenodewriterwriter)
- [codenode.writer.WriterStack](#codenodewriterwriterstack)
- [codenode.nodes.newline.Newline](#codenodenodesnewlinenewline)
- [codenode.nodes.depth_change.DepthChange](#codenodenodesdepth_changedepthchange)
- [codenode.nodes.depth_change.RelativeDepthChange](#codenodenodesdepth_changerelativedepthchange)
- [codenode.nodes.depth_change.AbsoluteDepthChange](#codenodenodesdepth_changeabsolutedepthchange)
- [codenode.nodes.indentation.Indentation](#codenodenodesindentationindentation)
- [codenode.nodes.indentation.RelativeIndentation](#codenodenodesindentationrelativeindentation)
- [codenode.nodes.indentation.AbsoluteIndentation](#codenodenodesindentationabsoluteindentation)
- [codenode.nodes.indentation.CurrentIndentation](#codenodenodesindentationcurrentindentation)
- [codenode.debug.debug_patch](#codenodedebugdebug_patch)
---
### codenode.dump<a id="codenodedump"></a>
> ```python
> def dump(node, stream, *, indentation=' ', newline='\n', depth=0, debug=False): ...
> ````
>
> Process and write out a node tree to a stream.
>
>
> #### Parameters
> * > ***node:***
> > Base node of node tree.
> * > ***stream:***
> > An object with a 'write' method.
> * > ***indentation:***
> > String used for indents in the output.
> * > ***newline:***
> > String used for newlines in the output.
> * > ***depth:***
> > Base depth (i.e. number of indents) to start at.
> * > ***debug:***
> > If True, will print out extra info when an error
> > occurs to give a better idea of which node caused it.
---
### codenode.dumps<a id="codenodedumps"></a>
> ```python
> def dumps(node, *, indentation=' ', newline='\n', depth=0, debug=False) -> str: ...
> ````
>
> Process and write out a node tree as a string.
>
>
> #### Parameters
> * > ***node:***
> > Base node of node tree.
> * > ***indentation:***
> > String used for indents in the output.
> * > ***newline:***
> > String used for newlines in the output.
> * > ***depth:***
> > Base depth (i.e. number of indents) to start at.
> * > ***debug:***
> > If True, will print out extra info when an error
> > occurs to give a better idea of which node caused it.
> >
> #### Returns
> * > String representation of node tree.
>
---
### codenode.line<a id="codenodeline"></a>
> ```python
> def line(content: 'T') -> 'tuple[Indentation, T, Newline]': ...
> ````
>
> Convenience function that returns a tuple containing
> an indentation node, line content and a newline node.
>
>
> #### Parameters
> * > ***content:***
> > content of line
> #### Returns
> * > tuple containing an indentation node, line content and
> a newline node.
>
---
### codenode.indent<a id="codenodeindent"></a>
> A node representing a single increase in indentation level.
---
### codenode.dedent<a id="codenodededent"></a>
> A node representing a single decrease in indentation level.
---
### codenode.newline<a id="codenodenewline"></a>
> A placeholder node for line terminators.
---
### codenode.indentation<a id="codenodeindentation"></a>
> A placeholder node for indentation whitespace at the start of a line.
---
### codenode.lines<a id="codenodelines"></a>
> ```python
> def lines(*items) -> tuple[tuple, ...]: ...
> ````
>
> Convenience function that returns a tuple of lines,
> where each argument is the content of one line.
>
>
> #### Parameters
> * > ***items:***
> > contents of lines
> #### Returns
> * > tuple of lines
>
---
### codenode.empty_lines<a id="codenodeempty_lines"></a>
> ```python
> def empty_lines(count: int) -> 'tuple[Newline, ...]': ...
> ````
>
> Convenience function that returns a tuple of newline nodes.
>
>
> #### Parameters
> * > ***count:***
> > Number of newlines.
> #### Returns
> * > Tuple of newlines.
>
---
### codenode.indented<a id="codenodeindented"></a>
> ```python
> def indented(*nodes) -> tuple: ...
> ````
>
> Convenience function that returns a tuple containing an indent node,
> some inner nodes, and a dedent node.
>
>
> #### Parameters
> * > ***nodes:***
> > inner nodes
> #### Returns
> * > tuple containing an indent node, inner nodes, and a dedent node.
>
---
### codenode.default_writer_type<a id="codenodedefault_writer_type"></a>
> Default Writer type used in codenode.dump and codenode.dumps.
---
### codenode.writer.Writer<a id="codenodewriterwriter"></a>
> ```python
> class Writer: ...
> ```
>
> Processes node trees into strings then writes out the result.
>
> Each instance is intended to be used once then discarded.
> After a single call to either dump or dumps, the Writer
> instance is no longer useful.
#### Methods
> ##### `__init__`
> ```python
> class Writer:
> def __init__(self, node: 'NodeType', *, indentation=' ', newline='\n', depth=0): ...
> ````
>
>
> #### Parameters
> * > ***node:***
> > Base node of node tree.
> * > ***indentation:***
> > Initial string used for indents in the output.
> * > ***newline:***
> > Initial string used for newlines in the output.
> * > ***depth:***
> > Base depth (i.e. number of indents) to start at.
> ##### `process_node`
> ```python
> class Writer:
> def process_node(self, node) -> 'Iterable[str]': ...
> ````
>
> Yield strings representing a node and/or apply any of its
> associated side effects to the writer
>
> for example:
>
> - yield indentation string when an indentation node is encountered
>
> - increase the current writer depth if an indent is encountered
>
> - append an iterator to the stack when an iterable is encountered
>
>
> #### Parameters
> * > ***node:***
> > node to be processed
> #### Returns
> * > strings of text chunks representing the node
>
> ##### `dump_iter`
> ```python
> class Writer:
> def dump_iter(self) -> 'Iterable[str]': ...
> ````
>
> Process and write out a node tree as an iterable of
> string chunks.
>
>
> #### Returns
> * > Iterable of string chunks.
>
> ##### `dump`
> ```python
> class Writer:
> def dump(self, stream): ...
> ````
>
> Process and write out a node tree to a stream.
>
>
> #### Parameters
> * > ***stream:***
> > An object with a 'write' method.
> ##### `dumps`
> ```python
> class Writer:
> def dumps(self): ...
> ````
>
> Process and write out a node tree as a string.
>
>
> #### Returns
> * > String representation of node tree.
>
#### Attributes
> ***node:***
> Base node of node tree
> ***stack:***
> WriterStack used to iterate over the node tree
> ***indentation:***
> Current string used for indents in the output
> ***newline:***
> Current string used for line termination in the output
> ***depth:***
> Current output depth (i.e. number of indents)
---
### codenode.writer.WriterStack<a id="codenodewriterwriterstack"></a>
> ```python
> class WriterStack: ...
> ```
>
> A stack of iterators.
> Used by the Writer class to traverse node trees.
>
> Each instance is intended to be used once then discarded.
#### Methods
> ##### `push`
> ```python
> class WriterStack:
> def push(self, node: 'NodeType'): ...
> ````
>
> Converts a node to an iterator then places it at
> the top of the stack.
>
>
> #### Parameters
> * > ***node:***
> > iterable node
> ##### `__iter__`
> ```python
> class WriterStack:
> def __iter__(self) -> 'Iterable[NodeType]': ...
> ````
>
> Continually iterates the top iterator in the stack's items,
> yielding each result then popping each iterator off when they
> are exhausted.
>
#### Attributes
> ***items:*** collections.deque -
> Current items in the stack.
---
### codenode.nodes.newline.Newline<a id="codenodenodesnewlinenewline"></a>
> ```python
> class Newline: ...
> ```
>
> Nodes that represent the end of a line.
---
### codenode.nodes.depth_change.DepthChange<a id="codenodenodesdepth_changedepthchange"></a>
> ```python
> class DepthChange: ...
> ```
>
> Nodes that represent a change in indentation depth.
#### Methods
> ##### `new_depth_for`
> ```python
> class DepthChange:
> def new_depth_for(self, depth: int) -> int: ...
> ````
>
> Method used to calculate the new depth based on the current one.
>
>
> #### Parameters
> * > ***depth:***
> > Current depth.
> #### Returns
> * > New depth.
>
---
### codenode.nodes.depth_change.RelativeDepthChange<a id="codenodenodesdepth_changerelativedepthchange"></a>
> ```python
> class RelativeDepthChange: ...
> ```
>
> Nodes that represent a change in indentation depth relative to the
> current depth by some preset amount.
#### Methods
> ##### `__init__`
> ```python
> class RelativeDepthChange:
> def __init__(self, offset: int): ...
> ````
>
>
> #### Parameters
> * > ***offset:***
> > Amount by which to increase/decrease depth.
#### Attributes
> ***offset:***
> Amount by which to increase/decrease depth when this node is
> processed.
---
### codenode.nodes.depth_change.AbsoluteDepthChange<a id="codenodenodesdepth_changeabsolutedepthchange"></a>
> ```python
> class AbsoluteDepthChange: ...
> ```
>
> Nodes that represent a change in indentation depth without taking
> the current depth into account.
#### Methods
> ##### `__init__`
> ```python
> class AbsoluteDepthChange:
> def __init__(self, value: int): ...
> ````
>
>
> #### Parameters
> * > ***value:***
> > Value to set depth to.
#### Attributes
> ***value:***
> Value to which depth will be set to when this node is
> processed.
---
### codenode.nodes.indentation.Indentation<a id="codenodenodesindentationindentation"></a>
> ```python
> class Indentation: ...
> ```
>
> Nodes that represent indentation whitespace at the start of a line.
#### Methods
> ##### `indents_for`
> ```python
> class Indentation:
> def indents_for(self, depth: int) -> int: ...
> ````
>
>
> #### Parameters
> * > ***depth:***
> > Current depth.
> #### Returns
> * > Number of indents to include in whitespace when this
> node is processed.
>
---
### codenode.nodes.indentation.RelativeIndentation<a id="codenodenodesindentationrelativeindentation"></a>
> ```python
> class RelativeIndentation: ...
> ```
>
> Nodes that represent indentation whitespace at the start of a line,
> with a number of indents relative to the current depth by some
> preset amount.
#### Methods
> ##### `__init__`
> ```python
> class RelativeIndentation:
> def __init__(self, offset: int): ...
> ````
>
>
> #### Parameters
> * > ***offset:***
> > Amount of indents relative to the current depth.
#### Attributes
> ***offset:***
> Amount of indents relative to the current depth that will be
> output when this node is processed.
---
### codenode.nodes.indentation.AbsoluteIndentation<a id="codenodenodesindentationabsoluteindentation"></a>
> ```python
> class AbsoluteIndentation: ...
> ```
>
> Nodes that represent indentation whitespace at the start of a line,
> with a number of indents independent of the current depth.
#### Methods
> ##### `__init__`
> ```python
> class AbsoluteIndentation:
> def __init__(self, value: int): ...
> ````
>
>
> #### Parameters
> * > ***value:***
> > Amount of indents.
#### Attributes
> ***value:***
> Amount of indents that will be output when this node is processed.
---
### codenode.nodes.indentation.CurrentIndentation<a id="codenodenodesindentationcurrentindentation"></a>
> ```python
> class CurrentIndentation: ...
> ```
>
> Nodes that represent indentation whitespace at the start of a line,
> with a number of indents equal to the current depth.
---
### codenode.debug.debug_patch<a id="codenodedebugdebug_patch"></a>
> ```python
> def debug_patch(writer_type: typing.Type[Writer]) -> typing.Type[Writer]: ...
> ````
>
> Creates a modified version of a writer type
> which prints out some extra info when encountering
> an error to give a better ballpark idea of what caused it.
> Used in codenode.dump/dumps to implement the debug parameter.
>
>
> #### Parameters
> * > ***writer_type:***
> > Base writer type.
> #### Returns
> * > New child writer type with debug modifications.
>
| 0xf0f-codenode | /0xf0f-codenode-1.0rc1.tar.gz/0xf0f-codenode-1.0rc1/README.md | README.md |
import collections
import functools
import io
import pprint
import typing
from .writer import Writer, WriterStack
class DebugIterator:
    """Iterator wrapper that records traversal progress for error reports.

    Tracks how many items have been yielded and keeps the eight most
    recently yielded items, so print_writer_stack can show where in a
    node tree processing stopped.
    """

    def __init__(self, iterable):
        self.iterable = iterable
        self.iterator = iter(iterable)
        self.items_yielded = 0
        # Bounded history: only the last 8 yielded items are retained.
        self.item_buffer = collections.deque(maxlen=8)

    def __iter__(self):
        return self

    def __next__(self):
        value = next(self.iterator)
        self.items_yielded += 1
        self.item_buffer.append(value)
        return value

    @property
    def current_item(self):
        """Most recently yielded item, or None before the first yield."""
        if self.item_buffer:
            return self.item_buffer[-1]
        return None
def print_writer_stack(writer: Writer, stream):
    """Write a human-readable dump of *writer*'s iterator stack to *stream*.

    Used by debug_patch to report where in the node tree an error
    occurred: one section per stacked iterator, innermost last.
    """
    # depth=2 keeps pprint output shallow so deeply nested trees stay
    # readable in the error message.
    pretty_print = functools.partial(
        pprint.pprint,
        stream=stream,
        depth=2,
        compact=False,
        indent=2,
        width=128,
    )
    for index, iterator in enumerate(writer.stack.items):
        stream.write(f'node #{index}: \n')
        # NOTE(review): .iterable is accessed before the DebugIterator
        # check below; a non-DebugIterator entry without that attribute
        # would raise AttributeError here — confirm all entries are
        # wrapped (debug_patch wraps every push).
        stream.write(f'type: {type(iterator.iterable)}\n')
        if isinstance(iterator, DebugIterator):
            if isinstance(iterator.iterable, typing.Sequence):
                # Sequences: show each element with its index.
                for sub_index, sub_item in enumerate(iterator.iterable):
                    stream.write(f' item {sub_index}: ')
                    pretty_print(sub_item)
            else:
                pretty_print(iterator.iterable)
            # Summarize progress: bounded recent-item history + total count.
            stream.write(
                f' last {len(iterator.item_buffer)} items processed: '
                f'({iterator.items_yielded} total)\n'
            )
            for item in iterator.item_buffer:
                stream.write(' ')
                pretty_print(item)
        else:
            # Fallback for unwrapped iterators: just show their repr.
            stream.write(repr(iterator))
            stream.write('\n')
        stream.write('\n')
def debug_patch(writer_type: typing.Type[Writer]) -> typing.Type[Writer]:
    """
    Creates a modified version of a writer type
    which prints out some extra info when encountering
    an error to give a better ballpark idea of what caused it.
    Used in codenode.dump/dumps to implement the debug parameter.

    :param writer_type: Base writer type.
    :return: New child writer type with debug modifications.
    """
    class PatchedWriter(writer_type):
        # 'stack' is a plain attribute on the base Writer; making it a
        # property here intercepts the assignment done in
        # writer_type.__init__ so the stack can be instrumented.
        @property
        def stack(self):
            return self._stack

        @stack.setter
        def stack(self, stack: WriterStack):
            # Wrap push so every node pushed onto the stack is tracked by
            # a DebugIterator (yield counts + recent-item history).
            push = stack.push
            stack.push = lambda node: push(DebugIterator(node))
            self._stack = stack

        def dump_iter(self):
            try:
                yield from super().dump_iter()
            except Exception as e:
                # Augment the exception's message with a dump of the
                # writer stack, then re-raise the same exception object.
                buffer = io.StringIO()
                buffer.write(''.join(map(str, e.args)))
                buffer.write('\n\nWriter stack:\n')
                print_writer_stack(self, buffer)
                e.args = (buffer.getvalue(),)
                raise

    return PatchedWriter
import collections
import io
import typing
from .nodes.newline import Newline
from .nodes.indentation import Indentation
from .nodes.depth_change import DepthChange
if typing.TYPE_CHECKING:
from typing import Union, Iterable
NodeType = Iterable[Union[str, 'NodeType']]
class WriterStack:
    """Last-in-first-out collection of live iterators.

    The Writer walks a node tree depth-first by pushing each nested
    iterable here and always consuming from the most recently pushed
    iterator. A fresh instance is expected for every traversal.
    """

    def __init__(self):
        # deque gives O(1) append/pop at the right end — all that's needed.
        self.items: collections.deque = collections.deque()

    def push(self, node: 'NodeType'):
        """Wrap *node* in an iterator and make it the active (top) entry."""
        self.items.append(iter(node))

    def __iter__(self) -> 'Iterable[NodeType]':
        """Yield from the top iterator, discarding each one as it finishes.

        Re-reads the top of the stack on every step, so iterators pushed
        while a value is being consumed take effect immediately.
        """
        while self.items:
            top = self.items[-1]
            try:
                value = next(top)
            except StopIteration:
                self.items.pop()
            else:
                yield value
class Writer:
    """
    Processes node trees into strings then writes out the result.

    Each instance is intended to be used once then discarded.
    After a single call to either dump or dumps, the Writer
    instance is no longer useful.
    """
    def __init__(
            self,
            node: 'NodeType', *,
            indentation=' ',
            newline='\n',
            depth=0,
    ):
        """
        :param node: Base node of node tree.
        :param indentation: Initial string used for indents in the output.
        :param newline: Initial string used for newlines in the output.
        :param depth: Base depth (i.e. number of indents) to start at.
        """
        self.node = node
        "Base node of node tree"

        self.stack = WriterStack()
        "WriterStack used to iterate over the node tree"

        # The root is wrapped in a 1-tuple so the stack always starts with
        # a plain iterable, even when the root is a bare string or node.
        self.stack.push((node,))

        self.indentation = indentation
        "Current string used for indents in the output"

        self.newline = newline
        "Current string used for line termination in the output"

        self.depth = depth
        "Current output depth (i.e. number of indents)"

    def process_node(self, node) -> 'Iterable[str]':
        """
        Yield strings representing a node and/or apply any of its
        associated side effects to the writer

        for example:

        - yield indentation string when an indentation node is encountered

        - increase the current writer depth if an indent is encountered

        - append an iterator to the stack when an iterable is encountered

        :param node: node to be processed
        :returns: strings of text chunks representing the node
        """
        # Strings must be checked before the iterable fallback: a str is
        # itself iterable (yielding length-1 strings), so pushing it onto
        # the stack would recurse forever character by character.
        if isinstance(node, str):
            yield node
        elif isinstance(node, DepthChange):
            # Side effect only — adjusts depth, emits no text.
            self.depth = node.new_depth_for(self.depth)
        elif isinstance(node, Indentation):
            # The node decides how many indents to emit for this depth.
            yield self.indentation * node.indents_for(self.depth)
        elif isinstance(node, Newline):
            yield self.newline
        else:
            # Anything else is treated as a nested iterable of nodes.
            try:
                self.stack.push(node)
            except TypeError as error:
                raise TypeError(
                    f'Unable to process node "{node}".\n'
                    'Either convert it to a string, iterable or '
                    'override Writer.process_node to handle nodes '
                    'of this type.'
                ) from error

    def dump_iter(self) -> 'Iterable[str]':
        """
        Process and write out a node tree as an iterable of
        string chunks.

        :return: Iterable of string chunks.
        """
        for node in self.stack:
            yield from self.process_node(node)

    def dump(self, stream):
        """
        Process and write out a node tree to a stream.

        :param stream: An object with a 'write' method.
        """
        for chunk in self.dump_iter():
            stream.write(chunk)

    def dumps(self):
        """
        Process and write out a node tree as a string.

        :return: String representation of node tree.
        """
        buffer = io.StringIO()
        self.dump(buffer)
        return buffer.getvalue()
from .writer import Writer
from .nodes.depth_change import RelativeDepthChange
from .nodes.indentation import CurrentIndentation
from .nodes.newline import Newline
from .debug import debug_patch
default_writer_type = Writer
"Default Writer type used in codenode.dump and codenode.dumps."

# The following are shared singleton marker nodes; Writer.process_node
# dispatches on their types, so one module-level instance of each is
# enough for all node trees.
indent = RelativeDepthChange(1)
"A node representing a single increase in indentation level."

dedent = RelativeDepthChange(-1)
"A node representing a single decrease in indentation level."

indentation = CurrentIndentation()
"A placeholder node for indentation whitespace at the start of a line."

newline = Newline()
"A placeholder node for line terminators."
def line(content: 'T') -> 'tuple[Indentation, T, Newline]':
    """Wrap *content* as one complete output line.

    :param content: content of line
    :return: tuple of (indentation node, content, newline node).
    """
    return (indentation, content, newline)
def lines(*items) -> tuple[tuple, ...]:
    """Turn each argument into one output line.

    :param items: contents of lines
    :return: tuple of lines
    """
    return tuple(line(item) for item in items)
def empty_lines(count: int) -> 'tuple[Newline, ...]':
    """Produce *count* blank lines as newline nodes.

    :param count: Number of newlines.
    :return: Tuple of newlines.
    """
    return tuple(newline for _ in range(count))
def indented(*nodes) -> tuple:
    """Wrap *nodes* between an indent node and a dedent node.

    :param nodes: inner nodes
    :return: tuple of (indent, inner-nodes tuple, dedent).
    """
    return (indent, nodes, dedent)
def dump(
        node, stream, *,
        indentation=' ',
        newline='\n',
        depth=0,
        debug=False,
):
    """Process and write out a node tree to a stream.

    :param node: Base node of node tree.
    :param stream: An object with a 'write' method.
    :param indentation: String used for indents in the output.
    :param newline: String used for newlines in the output.
    :param depth: Base depth (i.e. number of indents) to start at.
    :param debug: If True, will print out extra info when an error
                  occurs to give a better idea of which node caused it.
    """
    # debug mode swaps in an instrumented subclass of the default writer.
    writer_type = debug_patch(default_writer_type) if debug else default_writer_type
    writer = writer_type(
        node,
        indentation=indentation,
        newline=newline,
        depth=depth,
    )
    return writer.dump(stream)
def dumps(
        node, *,
        indentation=' ',
        newline='\n',
        depth=0,
        debug=False,
) -> str:
    """Process and write out a node tree as a string.

    :param node: Base node of node tree.
    :param indentation: String used for indents in the output.
    :param newline: String used for newlines in the output.
    :param depth: Base depth (i.e. number of indents) to start at.
    :param debug: If True, will print out extra info when an error
                  occurs to give a better idea of which node caused it.
    :return: String representation of node tree.
    """
    # debug mode swaps in an instrumented subclass of the default writer.
    writer_type = debug_patch(default_writer_type) if debug else default_writer_type
    writer = writer_type(
        node,
        indentation=indentation,
        newline=newline,
        depth=depth,
    )
    return writer.dumps()
__all__ = [
'indent', 'dedent', 'indented',
'indentation', 'newline',
'line', 'lines', 'empty_lines',
'dump', 'dumps', 'default_writer_type',
] | 0xf0f-codenode | /0xf0f-codenode-1.0rc1.tar.gz/0xf0f-codenode-1.0rc1/codenode/__init__.py | __init__.py |
from codenode import indent, dedent, dump, dumps
import typing
T = typing.TypeVar('T')


class PartitionedNode:
    """Node rendered as a header, an indented body, and a footer.

    Child nodes are collected in ``children``, which the default ``body``
    implementation yields. Convenience methods are provided for adding
    children and for writing the node out with the default Writer type.
    """

    def __init__(self):
        # Nodes yielded by the default body() implementation.
        self.children = []

    def header(self) -> 'Iterable':
        """Starting section of node."""
        yield from ()

    def body(self) -> 'Iterable':
        """Middle section of node.

        Yields children by default.
        """
        yield from self.children

    def footer(self) -> 'Iterable':
        """Ending section of node."""
        yield from ()

    def __iter__(self):
        for node in self.header():
            yield node
        yield indent
        for node in self.body():
            yield node
        yield dedent
        for node in self.footer():
            yield node

    def add_child(self, node: 'T') -> 'T':
        """Add a node to this node's children.

        :param node: Node to add.
        :return: Added node.
        """
        self.children.append(node)
        return node

    def add_children(self, nodes: typing.Iterable[T]) -> typing.Iterable[T]:
        """Add multiple nodes to this node's children.

        :param nodes: Nodes to add.
        :return: The added nodes
        """
        self.children.extend(nodes)
        return nodes

    def dump(
            self, stream, *,
            indentation=' ',
            newline='\n',
            depth=0,
            debug=False,
    ):
        """Process and write out this node to a stream.

        :param stream: An object with a 'write' method.
        :param indentation: String used for indents in the output.
        :param newline: String used for newlines in the output.
        :param depth: Base depth (i.e. number of indents) to start at.
        :param debug: If True, will print out extra info when an error
                      occurs to give a better idea of which node caused it.
        """
        # 'dump' here resolves to the module-level function imported from
        # codenode, not this method.
        return dump(
            self,
            stream,
            indentation=indentation,
            newline=newline,
            depth=depth,
            debug=debug,
        )

    def dumps(
            self, *,
            indentation=' ',
            newline='\n',
            depth=0,
            debug=False,
    ):
        """Process and write out this node as a string.

        :param indentation: String used for indents in the output.
        :param newline: String used for newlines in the output.
        :param depth: Base depth (i.e. number of indents) to start at.
        :param debug: If True, will print out extra info when an error
                      occurs to give a better idea of which node caused it.
        :return: String representation of node.
        """
        # As in dump(): the module-level dumps function from codenode.
        return dumps(
            self,
            indentation=indentation,
            newline=newline,
            depth=depth,
            debug=debug,
        )
import codenode
import io
import typing
# from .node_transformer import node_transformer
# from codenode.nodes.indentation import AbsoluteIndentation, CurrentIndentation
# from codenode.nodes.depth_change import RelativeDepthChange
#
#
# def prefixer(prefix):
# """
#
# :param prefix:
# :return:
# """
# def prefixed(node):
# indents = 0
#
# @node_transformer
# def transform(node):
# nonlocal indents
# if isinstance(node, RelativeDepthChange):
# indents += node.offset
# yield node
# elif isinstance(node, CurrentIndentation):
# yield RelativeDepthChange(-indents)
# yield node
# yield prefix
# yield AbsoluteIndentation(indents)
# yield RelativeDepthChange(indents)
# else:
# yield node
#
# yield from transform(node)
# return prefixed
# from .node_transformer import NodeTransformer
#
#
# def prefixer(prefix: str):
# def prefixed(node):
# indents = 0
#
# class Prefixer(NodeTransformer):
# def transform(self, node):
# nonlocal indents
# if isinstance(node, RelativeDepthChange):
# indents += node.offset
# yield node
# elif isinstance(node, CurrentIndentation):
# yield RelativeDepthChange(-indents)
# yield node
# yield prefix
# yield AbsoluteIndentation(indents)
# yield RelativeDepthChange(indents)
# else:
# yield node
#
# return Prefixer(node)
#
# return prefixed
# import inspect
#
# def get_writer_type():
# stack = inspect.stack()
# for frame_info in stack:
# try:
# cls = frame_info.frame.f_locals['__class__']
# except KeyError:
# continue
# else:
# if issubclass(cls, codenode.Writer):
# return cls
#
#
def yield_lines(iterator: typing.Iterable[str]):
    """Regroup an iterable of arbitrary string chunks into whole lines.

    Newlines may fall anywhere inside a chunk, span chunks, or appear
    several times within one chunk; each yielded value is one line
    without its trailing newline. A final unterminated line is yielded
    as-is.

    :param iterator: Iterable of string chunks.
    :return: Generator yielding complete lines.
    """
    buffer = io.StringIO()
    for chunk in iterator:
        # A single chunk may contain several newlines; emit one line per
        # newline instead of stopping after the first occurrence.
        newline_position = chunk.find('\n')
        while newline_position >= 0:
            buffer.write(chunk[:newline_position])
            yield buffer.getvalue()
            buffer.seek(0)
            buffer.truncate()
            chunk = chunk[newline_position + 1:]
            newline_position = chunk.find('\n')
        buffer.write(chunk)
    if buffer.tell():
        # Trailing text with no final newline still forms a last line.
        yield buffer.getvalue()
#
#
# def prefixer(prefix: str):
# def prefixed(
# node,
# dump_iter=None,
# ):
# if dump_iter is None:
# writer_type = get_writer_type()
# dump_iter = lambda node: writer_type(node).dump_iter()
#
# for line_content in yield_lines(dump_iter(node)):
# yield codenode.line(f'{prefix}{line_content}')
# return prefixed
def prefixer(prefix: str):
    """Build a transformer that prepends *prefix* to every output line of a node.

    :param prefix: String placed at the start of each line.
    :return: A function accepting a node plus a node-to-string renderer
        (defaults to codenode.dumps). It renders the node and yields one
        new line node per rendered line, each carrying the prefix.
    """
    def prefixed(
        node,
        dumps=lambda node: codenode.dumps(node),
    ):
        rendered = dumps(node)
        for text in rendered.splitlines():
            yield codenode.line(prefix + text)
    return prefixed
def prefixer_iter(prefix: str):
    """Build a transformer that prepends *prefix* to each output line, streaming.

    Unlike prefixer, the node is not rendered to one big string up front;
    its output is consumed chunk by chunk and regrouped into lines.

    :param prefix: String placed at the start of each line.
    :return: A function accepting a node plus a node-to-chunk-iterable
        renderer (defaults to the default writer's dump_iter). It yields
        one new line node per output line, each carrying the prefix.
    """
    def prefixed(
        node,
        dump_iter=lambda node: codenode.default_writer_type(node).dump_iter()
    ):
        chunks = dump_iter(node)
        for text in yield_lines(chunks):
            yield codenode.line(prefix + text)
    return prefixed
import math
from base64 import b64encode
from html.parser import HTMLParser
from typing import Optional, Tuple
from urllib.parse import parse_qs, urlparse
import requests
API_BASE = "http://10.0.0.55"
def parse_homepage() -> Tuple[str, str]:
    """Fetch the portal homepage and extract the session ip and ac_id.

    The portal redirects to a URL whose query string carries ``ac_id``,
    and the page body embeds the client IP in an ``<input name="user_ip">``
    element.

    Raises:
        Exception: if the redirected URL has no acid parameter.
        Exception: if no IP can be found in the response HTML.

    Returns:
        Tuple[str, str]: Both the ip and the acid of the current session.
    """
    res = requests.get(API_BASE)

    # The redirect target's query string carries ac_id.
    ac_id = parse_qs(urlparse(res.url).query).get("ac_id")
    if not ac_id:
        raise Exception("failed to get acid")

    # The IP sits in a form input named "user_ip" inside the HTML body.
    class IPParser(HTMLParser):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.ip = None

        def handle_starttag(self, tag, attrs):
            if tag != "input":
                return
            attributes = dict(attrs)
            if attributes.get("name") == "user_ip":
                self.ip = attributes["value"]

        def feed(self, *args, **kwargs):
            super().feed(*args, **kwargs)
            return self.ip

    ip = IPParser().feed(res.text)
    if not ip:
        raise Exception("failed to get ip")

    return ip, ac_id[0]
def get_user_info() -> Tuple[bool, Optional[str]]:
    """Query the portal for the login state of the current IP.

    Returns:
        tuple[bool, Optional[str]]
        - a boolean indicating whether the current IP is logged in
        - the username of the current logged in user if exists
    """
    data = requests.get(API_BASE + "/cgi-bin/rad_user_info").text
    if data == "not_online_error":
        return False, None
    # The response is a comma-separated record whose first field is the username.
    return True, data.split(",")[0]
def fkbase64(raw_s: str) -> str:
    """Base64-encode *raw_s*, then remap the output through a shuffled alphabet."""
    # Keep only the low byte of each character before encoding.
    masked = bytes(ord(ch) & 0xFF for ch in raw_s)
    # Map the standard base64 alphabet onto the portal's shuffled one.
    trans = str.maketrans(
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",
        "LVoJPiCN2R8G90yg+hmFHuacZ1OWMnrsSTXkYpUq/3dlbfKwv6xztjI7DeBE45QA",
    )
    return b64encode(masked).decode().translate(trans)
def xencode(msg, key) -> str:
    """Obfuscate *msg* with *key* for the portal's login request.

    NOTE(review): this looks like the XXTEA-style "xEncode" routine shipped
    in srun portal login pages — confirm against the portal's JavaScript
    before changing any constant or the statement order.

    :param msg: Plain text to encode (typically JSON-encoded user info).
    :param key: Key string (the challenge token from the portal).
    :return: Obfuscated string, or "" when *msg* is empty.
    """
    def sencode(msg, key):
        # Pack the string into little-endian 32-bit words; when *key* is
        # truthy, append the original length as an extra trailing word.
        def ordat(msg, idx):
            # ord(msg[idx]), or 0 when idx is past the end of msg.
            if len(msg) > idx:
                return ord(msg[idx])
            return 0
        msg_len = len(msg)
        pwd = []
        for i in range(0, msg_len, 4):
            pwd.append(ordat(msg, i) | ordat(msg, i + 1) << 8 | ordat(msg, i + 2) << 16 | ordat(msg, i + 3) << 24)
        if key:
            pwd.append(msg_len)
        return pwd
    def lencode(msg, key) -> str:
        # Unpack 32-bit words back into a string (little-endian). When *key*
        # is truthy the last word holds the original length, used to trim the
        # result; "" is returned if that length is implausible.
        msg_len = len(msg)
        ll = (msg_len - 1) << 2
        if key:
            m = msg[msg_len - 1]
            if m < ll - 3 or m > ll:
                return ""
            ll = m
        for i in range(0, msg_len):
            msg[i] = chr(msg[i] & 0xFF) + chr(msg[i] >> 8 & 0xFF) + chr(msg[i] >> 16 & 0xFF) + chr(msg[i] >> 24 & 0xFF)
        if key:
            return "".join(msg)[0:ll]
        return "".join(msg)
    if msg == "":
        return ""
    pwd = sencode(msg, True)
    pwdk = sencode(key, False)
    # The key schedule needs at least four 32-bit words; zero-pad when short.
    if len(pwdk) < 4:
        pwdk = pwdk + [0] * (4 - len(pwdk))
    n = len(pwd) - 1
    z = pwd[n]
    y = pwd[0]
    c = 0x86014019 | 0x183639A0  # == 0x9E3779B9, the TEA "delta" constant
    m = 0
    e = 0
    p = 0
    q = math.floor(6 + 52 / (n + 1))  # number of mixing rounds
    d = 0
    while 0 < q:
        d = d + c & (0x8CE0D9BF | 0x731F2640)  # mask == 0xFFFFFFFF: keep d in 32 bits
        e = d >> 2 & 3
        p = 0
        while p < n:
            y = pwd[p + 1]
            m = z >> 5 ^ y << 2
            m = m + ((y >> 3 ^ z << 4) ^ (d ^ y))
            m = m + (pwdk[(p & 3) ^ e] ^ z)
            pwd[p] = pwd[p] + m & (0xEFB8D130 | 0x10472ECF)  # mask == 0xFFFFFFFF
            z = pwd[p]
            p = p + 1
        y = pwd[0]
        m = z >> 5 ^ y << 2
        m = m + ((y >> 3 ^ z << 4) ^ (d ^ y))
        m = m + (pwdk[(p & 3) ^ e] ^ z)
        pwd[n] = pwd[n] + m & (0xBB390742 | 0x44C6F8BD)  # mask == 0xFFFFFFFF
        z = pwd[n]
        q = q - 1
    return lencode(pwd, False)
import hmac
import json
from hashlib import sha1
from typing import Dict, Union
from requests import Session
from .action import Action
from .exception import AlreadyLoggedOutException, AlreadyOnlineException, UsernameUnmatchedException
from .utils import fkbase64, get_user_info, parse_homepage, xencode
API_BASE = "http://10.0.0.55"
TYPE_CONST = 1
N_CONST = 200
class User:
    """A portal account bound to the device's current IP and ac_id.

    Wraps the captive-portal login/logout flow: fetching the challenge
    token, signing the request parameters, and submitting the action.
    """
    def __init__(self, username: str, password: str):
        self.username = username
        self.password = password
        # ip and acid are scraped from the portal homepage for this session.
        self.ip, self.acid = parse_homepage()
        self.session = Session()
    def do_action(self, action: Action) -> Dict[str, Union[str, int]]:
        """Perform a login or logout action and return the portal's JSON reply.

        :param action: Action.LOGIN or Action.LOGOUT.
        :raises AlreadyOnlineException: logging in while already online.
        :raises AlreadyLoggedOutException: logging out while already offline.
        :raises UsernameUnmatchedException: the online username differs from
            the one this User was created with.
        """
        # Check current state - whether device is logged in and whether current user the same as the provided one
        is_logged_in, username = get_user_info()
        if is_logged_in and action is Action.LOGIN:
            raise AlreadyOnlineException(f"{username}, you are already online")
        if not is_logged_in and action is Action.LOGOUT:
            raise AlreadyLoggedOutException("you have already logged out")
        # Raise exception only if username exists on this IP and command line arguments provided another username
        if username and username != self.username:
            raise UsernameUnmatchedException(
                f"current logged in user {username} and provided username {self.username} does not match"
            )
        # Perform login or logout action
        params = self._make_params(action)
        response = self.session.get(API_BASE + "/cgi-bin/srun_portal", params=params)
        # The portal answers in JSONP: strip the leading "jsonp(" and trailing ")".
        return json.loads(response.text[6:-1])
    def _get_token(self) -> str:
        """Fetch the challenge token used to sign this session's parameters."""
        params = {"callback": "jsonp", "username": self.username, "ip": self.ip}
        response = self.session.get(API_BASE + "/cgi-bin/get_challenge", params=params)
        # JSONP again: drop the "jsonp(" prefix and ")" suffix before parsing.
        result = json.loads(response.text[6:-1])
        return result["challenge"]
    def _make_params(self, action: Action) -> Dict[str, str]:
        """Build the signed query parameters for a login/logout request.

        The user info is xencode'd with the challenge token and masked-base64
        encoded, the password digest is an HMAC-MD5 keyed by the token, and a
        SHA1 checksum covers the token-joined fields.
        """
        token = self._get_token()
        params = {
            "callback": "jsonp",
            "username": self.username,
            "action": action.value,
            "ac_id": self.acid,
            "ip": self.ip,
            "type": TYPE_CONST,
            "n": N_CONST,
        }
        data = {
            "username": self.username,
            "password": self.password,
            "acid": self.acid,
            "ip": self.ip,
            "enc_ver": "srun_bx1",
        }
        # HMAC-MD5 of an empty message, keyed by the challenge token.
        hmd5 = hmac.new(token.encode(), b"", "MD5").hexdigest()
        json_data = json.dumps(data, separators=(",", ":"))
        info = "{SRBX1}" + fkbase64(xencode(json_data, token))
        # SHA1 over all signed fields, each preceded by the token ({0}).
        chksum = sha1(
            "{0}{1}{0}{2}{0}{3}{0}{4}{0}{5}{0}{6}{0}{7}".format(
                token, self.username, hmd5, self.acid, self.ip, N_CONST, TYPE_CONST, info
            ).encode()
        ).hexdigest()
        params.update({"password": "{MD5}" + hmd5, "chksum": chksum, "info": info})
        return params
# 80+ Python Projects by MRayan Asim 🐍🚀
<p align="center">
<img src="https://github.com/mrayanasim09/python-projects/raw/main/MRayan.png" alt="My Logo" style="max-width: 100%; max-height: 100%;">
</p>
## Table of Contents
- [Repository Structure 📂](#repository-structure-)
- [Categories 🗂️](#categories-%EF%B8%8F)
- [Projects 🔥](#projects-)
- [*GUI 🖥️*](#gui-️)
- [*Calculator 🧮*](#calculator-)
- [*Games 🎮*](#games-)
- [*Machine Learning 🤖📚🧠*](#machine-learning-)
- [*Utilities 🛠️*](#utilities-️)
- [Skill Level Tags ⭐](#skill-level-tags-)
- [Installation ⚙️](#installation-️)
- [About the Author 👤](#about-the-author-)
- [License 📝](#license-)
- [Note 📌](#note-)
- [FAQ 🗒️](#frequently-asked-questions-faq-%EF%B8%8F)
**🚀 Welcome to the mesmerizing realm of the Python Projects repository, curated by MRayan Asim! Get ready to embark on an exhilarating coding odyssey, where a trove of captivating Python creations awaits to inspire and empower developers of all levels. Whether you're taking your first coding steps or you're a seasoned programmer crafting intricate algorithms, this repository serves as your gateway to a world of endless possibilities. Discover gems that boost your resume's brilliance and engage in student projects that foster learning. Uncover a wealth of resources, expert guidance, and hands-on code examples that breathe life into your Python-based projects. Join us on this thrilling journey as we unveil the extraordinary potential of Python together! 💡🔥**
<p align="center">
<a href="https://forms.gle/SzJ4VA1zWZ3ehqGC6">
<img src="https://img.shields.io/badge/Google%20Forms-Give%20Your%20Feedback-red?style=for-the-badge&logo=google-forms" alt="Give me your feedback">
</a>
</p>
## Repository Structure 📂
The repository is organized into different categories, each containing specific project folders. This structure allows for easy navigation and helps you find projects that align with your interests. Each project is tagged with appropriate labels to indicate the recommended skill level. Let's take a look at the categories available:
## Categories 🗂️
* [GUI](https://github.com/drik493/python\_projects/tree/main/GUI) 🖥️
* [Calculator](https://github.com/drik493/python\_projects/tree/main/Calculator) 🧮
* [Games](https://github.com/drik493/python\_projects/tree/main/Game) 🎮
* [Machine learning](https://github.com/mrayanasim09/python-projects/tree/main/machine\_learning) 🤖📚🧠
* [Utilities](https://github.com/drik493/python\_projects/tree/main/Utilities) 🛠️
## Projects 🔥
Explore the projects in each category to find detailed information, documentation, and code examples. Here's a glimpse of the projects available within each category:
## *GUI 🖥️*
* [Form](https://github.com/drik493/python\_projects/blob/main/GUI/Form.py) 📝
* [A basic GUI calculator](https://github.com/drik493/python\_projects/blob/main/GUI/A\_basic\_gui\_calculator.py) 🧮
* [A working GUI clock also download the clock image](GUI/clock.py) 🕤
* [Tick cross (with GUI) ](GUI/tick\_cross.py)✔️❌
* [Todo list (with GUI)](GUI/todo.py) ✅📝
* [Notepad](https://github.com/drik493/python\_projects/blob/main/GUI/notepad.py) 📄
* [A snake and ladder game ](GUI/snake\_ladder.py)and [(also download the images with it)](GUI/ezgif-5-ad15f112d4.gif) 🐍🪜
* [A paint application](GUI/paint.py)🖌️🎨
* [A file explorer](GUI/file\_explorer.py) 📂🔎
* [Youtube video downloader](GUI/youtube\_download.py) 📺🔽💾
* [spelling correction](GUI/spelling.py) 🔤📏🔍
* [Figet spinner (use it on windows with space bar)](GUI/spinner.py) ߷
* [A beautiful design using turtle](GUI/graphics.py) 🐢🎨
* [A quiz application for asking common questions ](https://github.com/mrayanasim09/python-projects/blob/main/GUI/Quiz.py) 👉📜
* [Pikachu using turtle](GUI/Pikachu.py) (っ◔◡◔)っ
* [Doraemon using turtle](GUI/doramon.py)🐱🚀
* [Rainbow with turtle ](GUI/rainbow.py)🌈
* [A happy birthday message to the user with its name](GUI/happy\_birth\_day.py)🎂
* [Search installed applications](GUI/search\_applications.py) 🔍
* [A GUI calendar ](GUI/clender.py)📅
## *Calculator 🧮*
* [Quadratic Equation (with graph)](https://github.com/drik493/python\_projects/blob/main/Calculator/Quadratic\_Equation.py) 📈
* [A mega calculator with all operations](https://github.com/drik493/python\_projects/blob/main/Calculator/mega\_calculator.py) 🖩
* [A stock analyzer with its short form](Calculator/stock.py) 💵📊📈
* [Number base converter](https://github.com/drik493/python\_projects/blob/main/Calculator/number\_base.py) 🔢
* [Integration and differentiation](https://github.com/drik493/python\_projects/blob/main/Calculator/int\_diff.py) ∫
* [BMI calculator](https://github.com/drik493/python\_projects/blob/main/Calculator/bmi.py) 🏋️
* [Roman number convertor to decimal number](Calculator/roman\_number.py) 🧠
* [Time calculator](https://github.com/mrayanasim09/python-projects/blob/main/Calculator/time_calulator.py) ☀️🌙
* [special theory of relativity calculator](Calculator/special\_relativity\_calculator.py) ⌛📏⚡
* [Collatz Conjecture (3x+1) (with GUI)](https://github.com/drik493/python\_projects/blob/main/Calculator/conject.py) 📐
* [Fibonacci sequence](https://github.com/drik493/python\_projects/blob/main/Calculator/sequence.py) 🐇
* [Graph calculator from equation (with graph)](https://github.com/drik493/python\_projects/blob/main/Calculator/graph.py) 📊
* [Monthly Mortgage calculator](Calculator/Mortgage.py) 📈💴
* [12 hour time into 24 hour time](Calculator/12\_to\_24.py) 🕰️🕛
* [Grade calculator](https://github.com/drik493/python\_projects/blob/main/Calculator/grade.py) 🎓
* [Sudoku solver](https://github.com/drik493/python\_projects/blob/main/Calculator/sudukko.py) 🧩
* [A program to find the ASCII value of characters](Calculator/ASCII%20.py) 💻🔧
## *Games 🎮*
* [2048 game (without GUI)](https://github.com/drik493/python\_projects/blob/main/Game/2048.py) 🎲
* [Snake game (with GUI)](https://github.com/drik493/python\_projects/blob/main/Game/snake\_game.py) 🐍
* [Hangman](https://github.com/drik493/python\_projects/blob/main/Game/hangman.py) 🪓
* [Colox (a box colliding game with GUI)](Game/colox.py) 📦❄️
* [A color guessing game with GUI](Game/color\_guessing.py) 🎨🔍🌈
* [Master Mind](https://github.com/drik493/python\_projects/blob/main/Game/master\_mid.py) 🔐
* [A number details (prime, odd, co-prime, etc)](https://github.com/drik493/python\_projects/blob/main/Game/number\_details.py) 🔢
* Tick cross [(with GUI)](https://github.com/drik493/python\_projects/blob/main/Game/tick\_cross.py) or [(without GUI)](Game/tick\_cross\_gui.py) ❌⭕
* [Rock, paper, and scissors (without GUI)](https://github.com/drik493/python\_projects/blob/main/Game/rock,paper,scissors.py) ✊🖐✌️
* [A snake and ladder game ](Game/snake\_ladder.py)and [(also download the images with it)](Game/ezgif-5-ad15f112d4.gif) 🐍🪜
* [21 or 20 plus game](https://github.com/drik493/python\_projects/blob/main/Game/21.py) 🃏
* [Typing speed test](Game/typing\_speed.py) 🎮
* [Star patterns (7 types of patterns)](https://github.com/drik493/python\_projects/blob/main/Game/star.py) ✨
* [Dice rolling (With user guess without GUI)](https://github.com/drik493/python\_projects/blob/main/Game/dice.py) 🎲
* [Number guessing game](https://github.com/drik493/python\_projects/blob/main/Game/number\_guessing.py) 🔢❓
## *Machine Learning 🤖📚🧠*
* [Brightness controller with your hand](machine\_learning/brightness\_controllor.py) 🌞💡🎛️
* [Eye blink detection (also download the . XML files)](machine\_learning/eye\_blink.py) 👁️🔍😴
* [Text to speech](machine\_learning/text\_to\_speech.py) 🔤🔉
* [A language detector ](machine\_learning/lang\_dect.py)🔍🌐
* [Spam message detection using machine learning](machine\_learning/spam\_dect.py)🎁🎉🎈
* [Crypto price predictions (for days ahead of days entered by the user)](machine\_learning/crypto\_prices.py) 🚀🌕
* [Gold price predictions (for days ahead of days entered by the user)](machine\_learning/gold\_price.py) 💰🪙
* [Your phone camera on your PC ](machine\_learning/camera.py)you can check more about it [here](https://www.makeuseof.com/tag/ip-webcam-android-phone-as-a-web-cam/) 📱💻📸
* [A sentiments checker](machine\_learning/sentiments.py) 🤔💬💭
* [A sketch maker of image ](machine\_learning/sketch.py)🖌️
## *Utilities 🛠️*
* [Network passwords (only for the networks you have been connected to)](https://github.com/drik493/python\_projects/blob/main/Utilities/network.py) 🔐
* [Your own browser](Utilities/browser.py) 🌐
* [A site connection checker and timer](https://github.com/mrayanasim09/python-projects/blob/main/Utilities/connectivity.py) 🔗🌐
* [Count down (timer)](https://github.com/drik493/python\_projects/blob/main/Utilities/count\_down.py) ⏳
* [Tells basic information of an Instagram account only from user name](Utilities/inta.py) 📸
* [Transfer file (generate QR code for easy access)](https://github.com/drik493/python\_projects/blob/main/Utilities/transfer.py) 📁
* [Google search (from terminal)](https://github.com/drik493/python\_projects/blob/main/Utilities/google.py) 🔍
* [A password manager with a master key and encryption and decryption of passwords](Utilities/password\_manager.py) 🔐
* [bitcoin mining simulator](Utilities/btc.py) ₿
* [QR code generator](https://github.com/drik493/python\_projects/blob/main/Utilities/url.py) 🔗
* [WhatsApp spam message sender (you should click on the message bar of WhatsApp after running it)](Utilities/whatsapp\_spam.py) 📧🔁📧🔁📧🔁
* [Github repository details finder (only with username and name of the repository)](Utilities/github.py) :octocat:
* [Secret code generator (with decoding support)](https://github.com/drik493/python\_projects/blob/main/Utilities/secret\_code.py) 🤐
* [Password to hash form (md5)](https://github.com/drik493/python\_projects/blob/main/Utilities/password\_hash.py) 🔒
* [Hash password cracking (md5 only, using rockyou.txt)](https://github.com/drik493/python\_projects/blob/main/Utilities/password.py) 🚫🔍
* [Password generator](https://github.com/drik493/python\_projects/blob/main/Utilities/passwrd\_generator.py) 🔐🔢
* [Birth Day Finder (also zodiac sign, life path number, your birth date according to Islam and birthstone and birth flower)](https://github.com/drik493/python\_projects/blob/main/Utilities/birthday.py) 🎂🎉
* [words and letter count of given text](Utilities/word\_count.py) 🔢🔄️
* [A program to make short forms for the entered words](Utilities/short\_form.py) 🔤🔄
## Skill Level Tags ⭐
Projects are labeled with the following tags to help you identify their recommended skill level:
* Beginner: Suitable for beginners who are new to Python programming. 🌱
* Intermediate: Projects that require a moderate level of Python programming knowledge. 🚀
* Advanced: Projects that involve advanced concepts and techniques in Python. 🧠
## Installation ⚙️
we used these packages in our repository:
* Pygame 🎮
* Tkinter 🖼️
* GoogleSearch 🔍
* qrcode 📷
* Matplotlib 📊
* yfinance 💵📈
* Turtle 🐢
* Random 🎲
* Time ⏰
* Pillow 🖼️
* NumPy 🔢
* openpyxl 📄
* Datetime ⌚
* math ➗
* requests 🌐
* hijri\_converter 🌙
* threading 🧵
* instaloader 📥
* string 🔡
* hashlib 🔒
* socketserver 🖧
* socket 🧦
* http.server 🌐
* os 🖥️
* opencv 📷👁️
* langdetect 🌍
* sys 🔄💻
* json 🧩📄🔍
* re 🧩
* pyshorteners 🧹
* PyQt5 🐍🖼️🔌
* PyQtWebEngine: 🕸️🖼️🔌
* Panda 🐼🎉🐾
* textblob 📝📊🔍
* vaderSentiment 🤖💭📈
* pyttsx3 🔊🗣️
* winapps 👁️📂
* pytube 📼
* screen-brightness-control 🌞🖥️🔆
* pyautogui 📦🔧💻🐍
* mediapipe 🎥📡🤝
* prophet 🔮📈
* seaborn 📊🌈
You can install these packages using pip, the Python package manager. Open your terminal or command prompt and run the following commands:
```shell
pip install pygame
pip install googlesearch-python
pip install qrcode
pip install pyautogui
pip install pyttsx3
pip install winapps
pip install matplotlib
pip install tkcalendar
pip install pyqt5
pip install pyqtwebengine
pip install yfinance
pip install pillow
pip install openpyxl
pip install sympy
pip install pytube
pip install hijri_converter
pip install requests
pip install instaloader
pip install opencv-python
pip install textblob
pip install vaderSentiment
pip install langdetect
pip install screen-brightness-control
pip install numpy
pip install prophet
pip install seaborn
pip install mediapipe
pip install pyshorteners
```
### *To view more details on how to use this repository you can go* [_**here**_](How\_to\_use.md)
If you encounter any issues running the code, please report an issue, and I will respond as quickly as possible. 🐞
# About the Author 👤
MRayan Asim maintains this repository. As a passionate Python enthusiast, MRayan Asim is dedicated to developing practical and innovative projects. Whether you're a beginner or an experienced developer, MRayan Asim strives to provide projects that cater to various skill levels. If you have any questions or suggestions regarding the projects in this repository, feel free to reach out. 🚀\
[![Join our Discord](https://img.shields.io/badge/Join%20our%20Discord-7289DA?style=flat\&logo=discord\&logoColor=white)](https://discord.gg/uRfXYjub) [![Join Our Reddit Community](https://img.shields.io/badge/Join%20the%20Community-Reddit-orange)](https://www.reddit.com/r/Python\_projects\_rayan/) [![Email](https://img.shields.io/badge/Email-mrayanasim09%40gmail.com-%23D14836?logo=gmail)](mailto:[email protected]) [![LinkedIn](https://img.shields.io/badge/LinkedIn-View%20Profile-blue?logo=linkedin)](https://www.linkedin.com/in/mrayan-asim-044836275/) [![GitHub](https://img.shields.io/badge/GitHub-mrayanasim09-blue?logo=github)](https://github.com/mrayanasim09)
### *If you are thinking about how to start learning programming so you can check out my* [_roadmap on medium_](https://mrayanasim09.medium.com/how-to-start-learning-programming-from-beginners-to-advance-14248dcc7afa)
# License 📝
### *⚠️ DISCLAIMER: For educational purposes only. Code provided under* [![MIT License](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE/) ⚖️
# **Note 📌**
#### Feeling inspired to be a part of our dynamic community? Begin your journey by familiarizing yourself with our [**Code of Conduct**](code\_of\_conduct.md). We believe in a supportive and inclusive environment where everyone can thrive.
#### Ready to make your mark on our projects? Check out our [**How to Contribute**](CONTRIBUTING.md) guide, and embark on your coding adventure with us!
#### Excited to play a vital role in securing our projects? Explore the essential steps and best practices in our [**Security Policies**](SECURITY.md) to safeguard our coding community. Join hands with us on this crucial mission!
#### Discover a treasure trove of Python projects! From GUIs to machine learning, this repository offers many practical code examples and resources. **[Check out the summary](summary.md)** to explore our diverse collection and embark on your coding adventure with us!
### 🔍 To view the requirements for the system and Python version, you can check out the [prerequisites](https://github.com/mrayanasim09/python-projects/blob/main/prerequisites.md) 📋
# Frequently Asked Questions (FAQ) 🗒️
## *For common questions and troubleshooting tips, please check our [FAQ](FAQ.md)*
### *Remember, the world of coding is full of wonders, and your journey starts right here! 🌟*
# 🌟 **Sponsor Me and Fuel My Creativity** 🌟
If you find my Python projects valuable and would like to show your support, consider sponsoring me! Your generous contribution empowers me to continue developing innovative and practical projects for the coding community. A simple gesture like buying me a coffee goes a long way in keeping me fueled for more coding sessions. ☕️
<p align="center">
<a href="https://www.buymeacoffee.com/mrayanasim" target="_blank">
<img src="https://cdn.buymeacoffee.com/buttons/v2/default-yellow.png" alt="Buy Me A Coffee" height="50px">
</a>
</p>
💎 For those who prefer cryptocurrency, you can send some Ether (ETH) to my Ethereum wallet address: _**0xEC55fFf7a8387eeaa0Ef886305350Ab3578CE5D3**_. Your sponsorship means the world to me and serves as a powerful motivation to keep creating exciting Python projects for everyone to enjoy. 🚀🐍
🙏 Thank you for your incredible support! Your contributions inspire me to reach new heights and make a positive impact in the coding community. Let's create something amazing together! 🌟
![GitHub code size in bytes](https://img.shields.io/github/languages/code-size/mrayanasim09/python-projects) ![GitHub repo size](https://img.shields.io/github/repo-size/mrayanasim09/python-projects) ![GitHub top language](https://img.shields.io/github/languages/top/mrayanasim09/python-projects) ![GitHub contributors](https://img.shields.io/github/contributors-anon/mrayanasim09/python-projects) ![Visitors](https://api.visitorbadge.io/api/visitors?path=https%3A%2F%2Fgithub.com%2Fmrayanasim09%2Fpython-projects\&label=Views\&countColor=%23555555\&style=flat-square) [![codebeat badge](https://codebeat.co/badges/6fdc6dd9-f8b4-4af7-82bf-5dfc44c69273)](https://codebeat.co/projects/github-com-mrayanasim09-python-projects-main) [![CodeFactor](https://www.codefactor.io/repository/github/mrayanasim09/python-projects/badge)](https://www.codefactor.io/repository/github/mrayanasim09/python-projects) [![DeepSource](https://app.deepsource.com/gh/mrayanasim09/python-projects.svg/?label=active+issues&show_trend=true&token=R4sWBGxzRPv6AjY4YoLiE-wT)](https://app.deepsource.com/gh/mrayanasim09/python-projects/?ref=repository-badge) [![Backup Status](https://cloudback.it/badge/mrayanasim09/python-projects)](https://cloudback.it) ![GitHub last commit (branch)](https://img.shields.io/github/last-commit/mrayanasim09/python-projects/main) ![GitHub commit activity (branch)](https://img.shields.io/github/commit-activity/w/mrayanasim09/python-projects/main) ![GitHub release (with filter)](https://img.shields.io/github/v/release/mrayanasim09/python-projects)
## *To View full Documentations you can go [here](https://mrayans.gitbook.io/python--projects/)*
<script src="//code.tidio.co/ytw5wbhm91dwsvp9mv9gdiob6za99eer.js" async></script>
<script src="https://cdn.ingest-lr.com/LogRocket.min.js" crossorigin="anonymous"></script>
<script>window.LogRocket && window.LogRocket.init('93y3w1/python-projects');</script>
| 100-python-projects | /100_python_projects-0.5.tar.gz/100_python_projects-0.5/README.md | README.md |
<h1></h1>
<p>
<div class="separator" style="clear: both;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: block; padding: 1em 0px; text-align: center;" target="_blank"></a><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></div><div class="separator" style="clear: both;"><br /><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Would you like to achieve your dream of being a successful Forex trader? </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Take the shortcut to success with the state of the art Forex algorithm from 1000pip Climber. This trading system is rated 5 star on Investing.com and has verififed performance history from MyFXBook, so you can be confident that you are using the best algorithm available. 
</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" target="_blank">>>>Click here to learn more</a></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This unique Forex system continuously analyses the FX market, looking for potentially high probability price movements. Once identified the software will notify you visually, audibly, and via email. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">ALL key parameters are provided; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. This Forex system really is the simplest way to follow the FX market. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This is a rare opportunity to use a professional Forex trading algorithm that has produced highly accurate and consistent results. 
Join our group of loyal members and see how you can revolutionize your trading.</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; font-family: -webkit-standard; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></p></div>
# 1000pip Climber System Free Download
```bash
pip3 1000pip Climber System Free Download | 1000-pip-Climber-System-Download | /1000%20pip%20Climber%20System%20Download-2023.tar.gz/1000 pip Climber System Download-2023/README.md | README.md |
<h1></h1>
<p>
<div class="separator" style="clear: both;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: block; padding: 1em 0px; text-align: center;" target="_blank"></a><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></div><div class="separator" style="clear: both;"><br /><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Would you like to achieve your dream of being a successful Forex trader? </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Take the shortcut to success with the state of the art Forex algorithm from 1000pip Climber. This trading system is rated 5 star on Investing.com and has verififed performance history from MyFXBook, so you can be confident that you are using the best algorithm available. 
</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" target="_blank">>>>Click here to learn more</a></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This unique Forex system continuously analyses the FX market, looking for potentially high probability price movements. Once identified the software will notify you visually, audibly, and via email. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">ALL key parameters are provided; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. This Forex system really is the simplest way to follow the FX market. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This is a rare opportunity to use a professional Forex trading algorithm that has produced highly accurate and consistent results. 
Join our group of loyal members and see how you can revolutionize your trading.</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; font-family: -webkit-standard; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></p></div>
# 1000pip Climber System Free Download
```bash
pip3 1000pip Climber System Free Download | 1000-pip-Climber-System-cracked | /1000%20pip%20Climber%20System%20cracked-2023.tar.gz/1000 pip Climber System cracked-2023/README.md | README.md |
<h1></h1>
<p>
<div class="separator" style="clear: both;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: block; padding: 1em 0px; text-align: center;" target="_blank"></a><a href="https://1cc5e9nl-wm84kdoa-ckkk3w4q.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></div><div class="separator" style="clear: both;"><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p></div>
# 1000pip builder forex signals review
```bash
pip3 1000pip builder forex signals review | 1000pip-Builder-Forex-Signals | /1000pip%20Builder%20Forex%20Signals-2022.tar.gz/1000pip Builder Forex Signals-2022/README.md | README.md |
<h1></h1>
<p>
<p> </p><div class="separator" style="clear: both; text-align: center;"><a href="https://bfca1bsjtxsz0qao2rakno2q6w.hop.clickbank.net/?tid=pypi" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="66" data-original-width="372" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEi4l5Om8UgNW8H-xTWhIzADqqMVSw1UACA9qVkwlB3iq7WPzrWrDpvzG_xSJoJ7PPNSw66w9zKPeqAnlhSdobVmRP66RJT3abfvpidg4KqZyFV7Hd6cX8JpOVRQNkE_DgdHpLh6AfaVGnHsZKsRwxwsl3fj_quznxTcVdGp1D1lBSqqPxKXJNOMDpWnYQ/s16000/button_download-now-2.png" /></a></div><div><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"><br /></p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;">1000pip Climber System is a State of the art trading algorithm, designed to make it as easy as possible to trade the Forex market.</p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"><br /></p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"></p><ul style="text-align: left;"><li>Official Website : <a href="https://bfca1bsjtxsz0qao2rakno2q6w.hop.clickbank.net/?tid=pypi">1000pipclimbersystem.com</a></li><li>Founded in 2017</li><li>Located in United Kingdom</li><li>TRAINING : Webinars + Documentation</li><li>SUPPORT : Only Chat</li><li>Price : $97.00 one-time (No free trial No free version)</li></ul><p></p></div><div><br /></div>
# 1000pip Climber System Free Download
```bash
pip3 1000pip Climber System Free Download | 1000pip-Climber-System-Free-Download | /1000pip%20Climber%20System%20Free%20Download-1.tar.gz/1000pip Climber System Free Download-1/README.md | README.md |
<h1></h1>
<p>
<p> </p><div class="separator" style="clear: both; text-align: center;"><a href="https://bfca1bsjtxsz0qao2rakno2q6w.hop.clickbank.net/?tid=pypi" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="66" data-original-width="372" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEi4l5Om8UgNW8H-xTWhIzADqqMVSw1UACA9qVkwlB3iq7WPzrWrDpvzG_xSJoJ7PPNSw66w9zKPeqAnlhSdobVmRP66RJT3abfvpidg4KqZyFV7Hd6cX8JpOVRQNkE_DgdHpLh6AfaVGnHsZKsRwxwsl3fj_quznxTcVdGp1D1lBSqqPxKXJNOMDpWnYQ/s16000/button_download-now-2.png" /></a></div><div><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"><br /></p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;">1000pip Climber System is a State of the art trading algorithm, designed to make it as easy as possible to trade the Forex market.</p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"><br /></p><p style="box-sizing: border-box; color: #454545; font-family: Circular-book; font-size: 18px; margin: 0px 0px 15px; padding: 0px 0px 5px;"></p><ul style="text-align: left;"><li>Official Website : <a href="https://bfca1bsjtxsz0qao2rakno2q6w.hop.clickbank.net/?tid=pypi">1000pipclimbersystem.com</a></li><li>Founded in 2017</li><li>Located in United Kingdom</li><li>TRAINING : Webinars + Documentation</li><li>SUPPORT : Only Chat</li><li>Price : $97.00 one-time (No free trial No free version)</li></ul><p></p></div><div><br /></div>
# 1000pip Climber System Review
```bash
pip3 1000pip Climber System Review | 1000pip-Climber-System-Review | /1000pip%20Climber%20System%20Review-1.tar.gz/1000pip Climber System Review-1/README.md | README.md |
<h1></h1>
<p>
<div class="separator" style="clear: both;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: block; padding: 1em 0px; text-align: center;" target="_blank"></a><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></div><div class="separator" style="clear: both;"><br /><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Would you like to achieve your dream of being a successful Forex trader? </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Take the shortcut to success with the state of the art Forex algorithm from 1000pip Climber. This trading system is rated 5 star on Investing.com and has verififed performance history from MyFXBook, so you can be confident that you are using the best algorithm available. 
</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" target="_blank">>>>Click here to learn more</a></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This unique Forex system continuously analyses the FX market, looking for potentially high probability price movements. Once identified the software will notify you visually, audibly, and via email. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">ALL key parameters are provided; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. This Forex system really is the simplest way to follow the FX market. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This is a rare opportunity to use a professional Forex trading algorithm that has produced highly accurate and consistent results. 
Join our group of loyal members and see how you can revolutionize your trading.</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; font-family: -webkit-standard; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></p></div>
# 1000pip Climber System download
```bash
pip3 1000pip Climber System download | 1000pip-Climber-System-download | /1000pip%20Climber%20System%20download-2022.tar.gz/1000pip Climber System download-2022/README.md | README.md |
<h1></h1>
<p>
<p> </p><div class="separator" style="clear: both; text-align: center;"><a href="https://bf031bojwy-4fr5xcr6mowbkfg.hop.clickbank.net/?tid=p" rel="nofollow" style="margin-left: 1em; margin-right: 1em;"><img border="0" data-original-height="189" data-original-width="568" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEg82VZUbEfSPJefMTPC3pmIfdAE4BalnwZkUe3zSn23_up0yiB8t-Tc64JX9Hr7QXysyvWSLCDHs9DIYPm5VOpmQzlOU9oTb05pl0qsoAFgDqD6INskVskhh2TbYnI6f7XFnNk3_dIHjuKeOOT6jyMcmZDrw57LKRL6g_ICro58kTBVgTSIFdQk9h9D/s16000/dd.png" /></a></div><br /><p></p>
# 1000pip builder
```bash
pip3 1000pip builder | 1000pip-builder | /1000pip%20builder-1.tar.gz/1000pip builder-1/README.md | README.md |
<h1></h1>
<p>
<p></p><div class="separator" style="clear: both; text-align: center;"><a href="https://858d0aqdynn98w4ucl3agpav9m.hop.clickbank.net/?tid=pydownload" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="66" data-original-width="372" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEg2_xrkLyf7pD0cFAe7B6aA4MPA5gI-Q4-OixeSQ10oz7vYlWLw1z8w-m8RnzChqAhtZttNWDpVnqyJKayuz47CFcCJzRXgAgtNKNXY3oij1iXLGVJOUDcENyjgcw6tCstE9hp7csPxXx47yJmo7dU91OrhZdCjRl-3xIqWTeKmsDY5ECyaun56gdpR/s16000/1000pip%20download.png" /></a></div><br /> <p></p><div class="separator" style="clear: both; text-align: center;"><a href="https://858d0aqdynn98w4ucl3agpav9m.hop.clickbank.net/?tid=pydownload" rel="nofollow" style="margin-left: 1em; margin-right: 1em;" target="_blank"><img border="0" data-original-height="391" data-original-width="1024" height="244" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhLg81Wxod8kgdXOh19gbTstCP_94Ar2QVH01EULV18vOGopBuPKmq1aJxLJRa0pUCcxULM6oPa-6Y2gOuP3Ls_FDHzpzy4Gk9xmXBu992zJX3K7RZiAwuhUzw2xH1XmwYUw-HEnTh9GXoFtJoVMzshRpNkK5w-_5rdxU31W4umNefXnyqdxwVAD3C6/w640-h244/1000pip%20climber%20system%20download.png" width="640" /></a></div><br /><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;">The 1000pip Climber Forex System is a state of the art algorithm, designed to make it as easy as possible to succeed at Forex. The Forex system continuously analyses the FX market, looking for potentially high probability price movements. 
Once identified the software will notify you visually, audibly, and via email.</span><br style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; line-height: 1.45; orphans: 2; text-decoration-thickness: initial; widows: 2;" /><br style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; line-height: 1.45; orphans: 2; text-decoration-thickness: initial; widows: 2;" /><strong style="background-color: white; border: 0px; color: #555555; font-family: Lato; font-size: 18px; font-stretch: inherit; font-variant-east-asian: inherit; font-variant-ligatures: normal; font-variant-numeric: inherit; line-height: 1.45; margin: 0px; orphans: 2; padding: 0px; text-decoration-thickness: initial; vertical-align: baseline; widows: 2;">ALL key parameters are provided</strong><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;">; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. 
This Forex system really is the simplest way to follow the FX market.</span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div><div><br /></div><div><span style="background-color: white; color: #555555; font-family: Lato; font-size: 18px; font-variant-ligatures: normal; orphans: 2; text-decoration-thickness: initial; widows: 2;"><br /></span></div>
<iframe allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="" frameborder="0" height="315" src="https://www.youtube.com/embed/13VUj7r_IUU" title="YouTube video player" width="560"></iframe>
# 1000pip Climber System Download
```bash
pip3 1000pip Climber System Download | 1000pipClimber | /1000pipClimber-1.tar.gz/1000pipClimber-1/README.md | README.md |
<h1></h1>
<p>
<div class="separator" style="clear: both;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: block; padding: 1em 0px; text-align: center;" target="_blank"></a><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></div><div class="separator" style="clear: both;"><br /><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Would you like to achieve your dream of being a successful Forex trader? </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">Take the shortcut to success with the state of the art Forex algorithm from 1000pip Climber. This trading system is rated 5 star on Investing.com and has verififed performance history from MyFXBook, so you can be confident that you are using the best algorithm available. 
</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" target="_blank">>>>Click here to learn more</a></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This unique Forex system continuously analyses the FX market, looking for potentially high probability price movements. Once identified the software will notify you visually, audibly, and via email. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">ALL key parameters are provided; entry price, take profit and stop loss. The Forex system is easy to set up and is designed to be followed 100% mechanically – just try the Forex system and see the results. This Forex system really is the simplest way to follow the FX market. </p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;">This is a rare opportunity to use a professional Forex trading algorithm that has produced highly accurate and consistent results. 
Join our group of loyal members and see how you can revolutionize your trading.</p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><br /></p><p style="border: 0px; caret-color: rgb(85, 85, 85); color: #555555; font-family: Lato; font-size: 20px; font-stretch: inherit; line-height: 24px; margin: 0px 0px 15px; padding: 0px; vertical-align: baseline;"><a href="https://156544mlxov28levev7grc9v9g.hop.clickbank.net/?tid=py" rel="nofollow" style="display: inline; font-family: -webkit-standard; padding: 1em 0px;" target="_blank"><img border="0" data-original-height="66" data-original-width="342" src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgTJ6OH0OJANSM_yJIoRArK0LR0CRaOEWTSm3mxxP23WdYgh3mQxKobSFzZrDFzIqCaNEnzzoXzexZ1XKUJF7eXiyCoKlBw1aQ3BOM5_92szbWpIjMKbFIasd51DpFoYG7UWvAn4rqfDqZe_nR8Ct0_ubH2WPREFJC_cJviYwd5Kpp3CtTabVq34YqWJA/s16000/button_download-now.png" /></a></p></div>
# 1000pip Climber System Free Download
```bash
pip3 1000pip Climber System Free Download | 1000pipClimberSystem-Free-Download | /1000pipClimberSystem%20Free%20Download-2023.tar.gz/1000pipClimberSystem Free Download-2023/README.md | README.md |
import json
import os
import argparse
import logging
import random
import requests
import sys
import time
import operator
from requests.auth import HTTPBasicAuth
from slackclient import SlackClient
# logging.basicConfig(level=logging.DEBUG, format='%(levelname)s: %(message)s')
# WATSON_API_ENDPOINT = 'https://gateway.watsonplatform.net/tone-analyzer/api/v3'
class EmojiBot(object):
    """Slack bot that reacts to messages with emotion-matched emoji.

    The bot ID is hard-coded for now; it will be passed in to the
    constructor later.
    """

    def __init__(self, bx_username, bx_password, slack_token):
        # Hard-coded bot identity (to be made configurable later).
        self._bot_id = "U4M6Z42JK"
        # Pause, in seconds, between reads from the RTM firehose.
        self._READ_WEBSOCKET_DELAY = 1
        self.sc = SlackClient(slack_token)
        self.bxauth = HTTPBasicAuth(username=bx_username, password=bx_password)

    def listen(self):
        """Connect to the Slack RTM firehose and process messages forever."""
        client = self.sc
        if not client.rtm_connect():
            logging.error("Connection failed. Invalid Slack token or bot ID?")
            return
        logging.info("StarterBot connected and running!")
        while True:
            event = parse_slack_output(client.rtm_read())
            if event:
                logging.info("event received from slack: %s",
                             event.get('text'))
                psychoAnalyze(
                    event=event,
                    slack_client=client,
                    bxauth=self.bxauth)
            time.sleep(self._READ_WEBSOCKET_DELAY)
def psychoAnalyze(event, slack_client, bxauth):
    """Score a Slack message's emotional tone via Watson; react if strong.

    Args:
        event (dict): Slack RTM event; uses its 'text', 'channel', 'ts' keys.
        slack_client: Connected SlackClient passed through to rewardEmotion.
        bxauth: HTTPBasicAuth credentials for the Watson Tone Analyzer.

    Returns:
        False when the Watson request fails; otherwise None.
    """
    EMOTIONAL_THRESHOLD = 0.6
    # Bug fix: the module-level WATSON_API_ENDPOINT constant is commented out
    # at the top of the file, so referencing it raised NameError at call time.
    # Honor a module-level override if one exists, else fall back to the URL
    # from the commented-out definition.
    endpoint = globals().get(
        'WATSON_API_ENDPOINT',
        'https://gateway.watsonplatform.net/tone-analyzer/api/v3')
    payload = {
        'text': event.get('text'),
        'version': '2016-05-19',
        'Content-Type': 'text/plain;charset=utf-8'
    }
    resp = requests.get(
        endpoint + '/tone', params=payload, auth=bxauth)
    if resp.status_code != 200:
        logging.error(
            "Failed request for tone data from Watson: %s" % resp.text)
        return False
    analysis = json.loads(resp.text)
    emotions = dict(anger=0, disgust=0, fear=0, joy=0, sadness=0)
    for category in analysis['document_tone']['tone_categories']:
        if category['category_id'] == 'emotion_tone':
            for tone in category['tones']:
                # Collapsed the per-tone if-chain: record any tone we track.
                if tone['tone_id'] in emotions:
                    emotions[tone['tone_id']] = tone['score']
    logging.info("Emotional parsing for statement \"%s\" complete: %s",
                 event.get('text'), emotions)
    sorted_emotions = sorted(
        emotions.items(), key=operator.itemgetter(1), reverse=True)
    (top_emotion, top_score) = sorted_emotions[0]
    if top_score > EMOTIONAL_THRESHOLD:
        logging.debug("This event merits an emoji response: %s", event)
        rewardEmotion(
            slack_client=slack_client,
            emotion=top_emotion,
            statement=event.get('text'),
            channel=event.get('channel'),
            timestamp=event.get('ts'))
    else:
        # Bug fix: previously logged max(emotions), i.e. the alphabetically
        # greatest key ('sadness'), not the emotion that actually scored top.
        logging.debug(
            "Decided this event only got a %s score of %f, so no response: %s",
            top_emotion, top_score, event)
def rewardEmotion(slack_client, emotion, statement, channel, timestamp):
    """React to a Slack message with a random emoji matching its emotion.

    Args:
        slack_client: Connected SlackClient used for the reactions.add call.
        emotion (str): One of 'anger', 'disgust', 'fear', 'joy', 'sadness'.
        statement (str): The original message text (kept for context).
        channel (str): Slack channel ID of the message.
        timestamp (str): Slack 'ts' identifying the message to react to.
    """
    # Emoji candidates keyed by emotion.
    emoji_pool = {
        'anger': [
            'hummus', 'rage', 'upside_down_face', 'pouting_cat',
            'dove_of_peace', 'wind_blowing_face', 'dealwithitparrot'
        ],
        'disgust': [
            'pizza', 'dizzy_face', 'boredparrot', 'no_mouth', 'neutral_face',
            'disappointed', 'hankey', 'shit', 'pouting_cat', 'thumbsdown'
        ],
        'fear': [
            'scream_cat', 'scream', 'confusedparrot', 'runner',
            'slightly_smiling_face', 'no_mouth', 'flushed', 'ghost',
            'thumbsdown', 'jack_o_lantern', 'sweat_drops'
        ],
        'joy': [
            'partyparrot', '100', 'blue_heart', 'pancakes', 'beers',
            'sparkles', 'heart_eyes_cat', 'raised_hands', 'clap', 'fire',
            'beers', 'fish_cake'
        ],
        'sadness': [
            'sadparrot', 'pouting_cat', 'thumbsdown', 'wind_blowing_face',
            'broken_heart', 'greyhound'
        ]
    }
    # Pick a random emoji matching the appropriate emotion
    pick = random.choice(emoji_pool[emotion])
    logging.info("We have selected the wonderful %s for this event",
                 pick)
    # Attach the chosen emoji as a reaction to the original message.
    slack_client.api_call(
        "reactions.add",
        channel=channel,
        name=pick,
        timestamp=timestamp)
def parse_slack_output(slack_rtm_output):
    """Pull the first text-bearing message out of an RTM read batch.

    The Slack Real Time Messaging API is an events firehose; this returns
    the first event in the batch that carries a 'text' key, or None when
    the batch is empty or contains no such event.
    """
    # We are a creepy bot, we listen to everything you say
    for event in (slack_rtm_output or []):
        if event and 'text' in event:
            return event
    return None
def try_load_env_var(var_name):
    """Read environment variables into a configuration object

    Args:
        var_name (str): Environment variable name to attempt to read

    Returns:
        str | None: The variable's value, or None when it is unset
        (a log line notes the fallback to command-line arguments).
    """
    # Idiom: os.environ.get replaces the membership-test-then-index pattern.
    value = os.environ.get(var_name)
    if value is None:
        logging.info(
            "Environment variable %s is not set. Will try to read from command-line",
            var_name)
    return value
def main():
    """Build the CLI, resolve credentials, and start the bot.

    All three credentials may come from flags or environment variables;
    if any is missing, usage is printed and the process exits with 1.
    """
    parser = argparse.ArgumentParser()
    # (flag, dest, help text, default) — same flags, order, and help
    # strings as before, just built table-style.
    arg_specs = [
        ("--debug", "debug",
         "Read input from debug file instead of user input", None),
        ("--bx-username", "bx_username",
         "Bluemix Tone Analyzer username", try_load_env_var("BX_USERNAME")),
        ("--bx-password", "bx_password",
         "Bluemix Tone Analyzer password", try_load_env_var("BX_PASSWORD")),
        ("--slack-token", "slack_token",
         "Slack client token", try_load_env_var("SLACK_TOKEN")),
    ]
    for flag, dest, help_text, default in arg_specs:
        parser.add_argument(
            flag,
            dest=dest,
            help=help_text,
            type=str,
            required=False,
            default=default)
    args = parser.parse_args()
    if not (args.bx_username and args.bx_password and args.slack_token):
        parser.print_help()
        sys.exit(1)
    eb = EmojiBot(
        bx_username=args.bx_username,
        bx_password=args.bx_password,
        slack_token=args.slack_token)
    eb.listen()
# if __name__ == "__main__":
# main() | 100bot | /100bot-1.0.1-py3-none-any.whl/slack100bot.py | slack100bot.py |
# 100Bot
This is a son-of-[IoBot](https://github.com/adahn6/Io) project, taking the best
things about Io and turning them into a monster.
A picture is worth 1000 words:
![Screenshot of example conversation](example.png)
100Bot is powered by the Watson Tone Analyzer service. Each message is carefully
parsed for emotional significance and response, so that the perfectly
appropriate reaction emoji can be chosen.
## Running 100Bot
The bot requires three very specific parameters:
- A Watson Tone Analyzer `username` credential
- A Watson Tone Analyzer `password` credential
- A Slack Integration Token for a bot
### Running with docker-compose
The easiest way to get 100bot up and running with dependencies is by using the docker service file included: `docker-compose.yml`. Modify the supplied `.env-sample` to provide credentials for the Watson Tone Analyzer and a Slack bot. Then build and start the service with:
```shell
docker-compose up -d
```
### Running natively with python
Pip modules `slackclient` and `requests` must be installed. Use virtualenv to make your life easier. Passing of credentials can be done with argparse:
```shell
python3 100bot.py \
--bx-username "user" \
--bx-password "verysecret" \
--slack-token "xoob-137138657231-2ReKEpxlvWwe6vDBripOs7sR"
```
but can also be done with environment variables:
```shell
export BX_USERNAME=user
export BX_PASSWORD=verysecret
export SLACK_TOKEN="xoob-137138657231-2ReKEpxlvWwe6vDBripOs7sR"
python3 100bot.py
```
(yes, these are all fake credentials don't try them...)
| 100bot | /100bot-1.0.1-py3-none-any.whl/100bot-1.0.1.dist-info/DESCRIPTION.rst | DESCRIPTION.rst |
import sys
import pandas as pd
import numpy as np
def normalized_matrix(filename):
    '''Vector-normalize each attribute column of the decision matrix.

    Args:
        filename: Path to a CSV whose first column names the alternatives
            and whose remaining columns hold numeric criteria values.

    Returns:
        2-D float ndarray where each entry is divided by the Euclidean
        norm of its column; None if reading/processing raised (the
        exception is printed, matching the module's error style).
    '''
    try:
        dataset = pd.read_csv(filename)  # loading the csv file into dataset
        if len(dataset.axes[1]) < 3:
            print("Number of columns should be greater than 3")
            sys.exit(1)
        # Bug fix: cast to float. With all-integer CSV data .values is an
        # int64 array and the old in-place division silently truncated
        # every normalized entry to 0.
        attributes = dataset.iloc[:, 1:].values.astype(float)
        # Euclidean norm of every column, then divide each entry by its
        # column's norm (vector normalization).
        column_norms = np.sqrt(np.square(attributes).sum(axis=0))
        return attributes / column_norms
    except Exception as e:
        print(e)
def weighted_matrix(attributes, weights):
    '''Scale every column of the normalized matrix by its weight.

    Args:
        attributes: 2-D sequence of normalized criteria values.
        weights: Comma-separated weight string, e.g. "1,2,1", one weight
            per column.

    Returns:
        List of lists with each value multiplied by its column weight;
        None if parsing/multiplication raised (the exception is printed).
    '''
    try:
        weight_values = [float(w) for w in weights.split(',')]
        weighted_attributes = [
            [value * weight_values[col] for col, value in enumerate(row)]
            for row in attributes
        ]
        return weighted_attributes
    except Exception as e:
        print(e)
def impact_matrix(weighted_attributes, impacts):
    '''Compute TOPSIS performance scores from the weighted matrix.

    Args:
        weighted_attributes: 2-D sequence of weighted criteria values.
        impacts: Comma-separated '+'/'-' string, one sign per column;
            '+' marks a benefit criterion, '-' a cost criterion.

    Returns:
        List of performance scores in [0, 1], one per row (higher is
        better); None if processing raised (the exception is printed).
    '''
    try:
        signs = impacts.split(',')
        n_cols = len(weighted_attributes[0])
        # Seed both ideal vectors from the first row, then widen them.
        ideal_best = list(weighted_attributes[0])
        ideal_worst = list(weighted_attributes[0])
        for row in weighted_attributes[1:]:
            for j in range(n_cols):
                v = row[j]
                if signs[j] == '+':
                    # Benefit criterion: larger is better.
                    if v > ideal_best[j]:
                        ideal_best[j] = v
                    elif v < ideal_worst[j]:
                        ideal_worst[j] = v
                elif signs[j] == '-':
                    # Cost criterion: smaller is better.
                    if v < ideal_best[j]:
                        ideal_best[j] = v
                    elif v > ideal_worst[j]:
                        ideal_worst[j] = v
        scores = []
        for row in weighted_attributes:
            # Euclidean separation from the ideal-best and ideal-worst points.
            dist_best = np.sqrt(
                sum(np.square(row[j] - ideal_best[j]) for j in range(n_cols)))
            dist_worst = np.sqrt(
                sum(np.square(row[j] - ideal_worst[j]) for j in range(n_cols)))
            scores.append(dist_worst / (dist_worst + dist_best))
        return scores
    except Exception as e:
        print(e)
def rank(filename,weights,impacts,resultfilename):
try:
a = normalized_matrix(filename)
c = weighted_matrix(a,weights)
d = impact_matrix(c,impacts)
dataset = pd.read_csv(filename)
dataset['topsis score']=""
dataset['topsis score']=d
copi=d.copy()
copi.sort(reverse=True)
Rank=[]
for i in range(0,len(d)):
temp=d[i]
for j in range(0,len(copi)):
if temp==copi[j]:
Rank.append(j+1)
break
dataset['Rank']=""
dataset['Rank']=Rank
dataset.to_csv(resultfilename,index=False)
except Exception as e:
print(e) | 1011903677-siddharth-topsis | /1011903677_siddharth_topsis-v1.2.tar.gz/1011903677_siddharth_topsis-v1.2/1011903677_siddharth_topsis/topsis.py | topsis.py |
TOPSIS Package
TOPSIS stands for Technique for Order Preference by Similarity to Ideal Solution.
It is a method of compensatory aggregation that compares a set of alternatives by identifying weights for each criterion, normalising scores for each criterion and calculating the geometric distance between each alternative and the ideal alternative, which is the best score in each criterion. An assumption of TOPSIS is that the criteria are monotonically increasing or decreasing. In this Python package Vector Normalization has been implemented.
This package has been created based on Project 1 of course UCS633.
Akriti Sehgal COE-3 101703048
In Command Prompt
>topsis data.csv "1,1,1,1" "+,+,-,+"
| 101703048-topsis | /101703048-topsis-2.0.1.tar.gz/101703048-topsis-2.0.1/README.md | README.md |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix read from a CSV file.

    The first CSV column is treated as an identifier and the first data row
    is skipped (original behaviour — presumably the author expected a second
    header row; confirm against the intended data format).
    """
    def __init__(self, filename):
        # Fix: the original defined '_init_', which Python never calls as a
        # constructor, so Topsis(filename) raised TypeError.
        if os.path.isdir(filename):
            head_tail = os.path.split(filename)
            data = pd.read_csv(head_tail[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        self.d = data.iloc[1:, 1:].values
        self.features = len(self.d[0])
        self.samples = len(self.d)

    def fun(self, a):
        """Sort key: the TOPSIS score (second element of a result row)."""
        return a[1]

    def fun2(self, a):
        """Sort key: the original row index (first element of a result row)."""
        return a[0]

    def evaluate(self, w=None, im=None):
        """Return [[row_index, score, rank], ...] for every data row.

        w  -- per-criterion weights (defaults to all 1s)
        im -- per-criterion impacts, '+' beneficial / '-' cost (defaults to '+')
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w is None:
            w = [1] * features
        if im is None:
            im = ["+"] * features
        ideal_best = []
        ideal_worst = []
        for i in range(0, features):
            # Vector normalisation followed by weighting, done in place.
            k = math.sqrt(sum(d[:, i] * d[:, i]))
            # Fix: start the running extremes from +/- infinity; the original
            # used 0 and 1, which is wrong once weighted values leave [0, 1].
            maxx = float("-inf")
            minn = float("inf")
            for j in range(0, samples):
                d[j, i] = (d[j, i] / k) * w[i]
                if d[j, i] > maxx:
                    maxx = d[j, i]
                if d[j, i] < minn:
                    minn = d[j, i]
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        p = []
        for i in range(0, samples):
            # Euclidean separation from the worst (a) and best (b) ideals.
            a = math.sqrt(sum((d[i] - ideal_worst) * (d[i] - ideal_worst)))
            b = math.sqrt(sum((d[i] - ideal_best) * (d[i] - ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a / (a + b))
            p.append(lst)
        # Rank 1 goes to the highest score; restore row order afterwards.
        p.sort(key=self.fun)
        rank = 1
        for i in range(samples - 1, -1, -1):
            p[i].append(rank)
            rank += 1
        p.sort(key=self.fun2)
        return p
def findTopsis(filename,w,i):
    """Convenience wrapper: run TOPSIS on `filename` and print the result."""
    result = Topsis(filename).evaluate(w, i)
    print(result)
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    args = sys.argv
    if len(args) != 4:
        print("wrong Parameters")
    else:
        weights = [int(tok) for tok in args[2].split(',')]
        impacts = args[3].split(',')
        print(Topsis(args[1]).evaluate(weights, impacts))
if _name_ == '_main_':
main() | 101703048-topsis | /101703048-topsis-2.0.1.tar.gz/101703048-topsis-2.0.1/topsis/topsis.py | topsis.py |
TOPSIS Package
TOPSIS stands for Technique for Order Preference by Similarity to Ideal Solution.
It is a method of compensatory aggregation that compares a set of alternatives by identifying weights for each criterion, normalising scores for each criterion and calculating the geometric distance between each alternative and the ideal alternative, which is the best score in each criterion. An assumption of TOPSIS is that the criteria are monotonically increasing or decreasing. In this Python package Vector Normalization has been implemented.
This package has been created based on Project 1 of course UCS633.
Anurag Aggarwal COE-4 101703088
In Command Prompt
>topsis data.csv "1,1,1,1" "+,+,-,+"
| 101703087-topsis | /101703087-topsis-2.0.1.tar.gz/101703087-topsis-2.0.1/README.md | README.md |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix read from a CSV file.

    The first CSV column is treated as an identifier and the first data row
    is skipped (original behaviour — presumably the author expected a second
    header row; confirm against the intended data format).
    """
    def __init__(self, filename):
        # Fix: the original defined '_init_', which Python never calls as a
        # constructor, so Topsis(filename) raised TypeError.
        if os.path.isdir(filename):
            head_tail = os.path.split(filename)
            data = pd.read_csv(head_tail[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        self.d = data.iloc[1:, 1:].values
        self.features = len(self.d[0])
        self.samples = len(self.d)

    def fun(self, a):
        """Sort key: the TOPSIS score (second element of a result row)."""
        return a[1]

    def fun2(self, a):
        """Sort key: the original row index (first element of a result row)."""
        return a[0]

    def evaluate(self, w=None, im=None):
        """Return [[row_index, score, rank], ...] for every data row.

        w  -- per-criterion weights (defaults to all 1s)
        im -- per-criterion impacts, '+' beneficial / '-' cost (defaults to '+')
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w is None:
            w = [1] * features
        if im is None:
            im = ["+"] * features
        ideal_best = []
        ideal_worst = []
        for i in range(0, features):
            # Vector normalisation followed by weighting, done in place.
            k = math.sqrt(sum(d[:, i] * d[:, i]))
            # Fix: start the running extremes from +/- infinity; the original
            # used 0 and 1, which is wrong once weighted values leave [0, 1].
            maxx = float("-inf")
            minn = float("inf")
            for j in range(0, samples):
                d[j, i] = (d[j, i] / k) * w[i]
                if d[j, i] > maxx:
                    maxx = d[j, i]
                if d[j, i] < minn:
                    minn = d[j, i]
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        p = []
        for i in range(0, samples):
            # Euclidean separation from the worst (a) and best (b) ideals.
            a = math.sqrt(sum((d[i] - ideal_worst) * (d[i] - ideal_worst)))
            b = math.sqrt(sum((d[i] - ideal_best) * (d[i] - ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a / (a + b))
            p.append(lst)
        # Rank 1 goes to the highest score; restore row order afterwards.
        p.sort(key=self.fun)
        rank = 1
        for i in range(samples - 1, -1, -1):
            p[i].append(rank)
            rank += 1
        p.sort(key=self.fun2)
        return p
def findTopsis(filename,w,i):
    """Convenience wrapper: run TOPSIS on `filename` and print the result."""
    result = Topsis(filename).evaluate(w, i)
    print(result)
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    args = sys.argv
    if len(args) != 4:
        print("wrong Parameters")
    else:
        weights = [int(tok) for tok in args[2].split(',')]
        impacts = args[3].split(',')
        print(Topsis(args[1]).evaluate(weights, impacts))
if _name_ == '_main_':
main() | 101703087-topsis | /101703087-topsis-2.0.1.tar.gz/101703087-topsis-2.0.1/topsis/topsis.py | topsis.py |
TOPSIS Package
TOPSIS stands for Technique for Order Preference by Similarity to Ideal Solution.
It is a method of compensatory aggregation that compares a set of alternatives by identifying weights for each criterion, normalising scores for each criterion and calculating the geometric distance between each alternative and the ideal alternative, which is the best score in each criterion. An assumption of TOPSIS is that the criteria are monotonically increasing or decreasing. In this Python package Vector Normalization has been implemented.
This package has been created based on Project 1 of course UCS633.
Anurag Aggarwal COE-4 101703088
In Command Prompt
>topsis data.csv "1,1,1,1" "+,+,-,+"
| 101703088-topsis | /101703088-topsis-2.0.2.tar.gz/101703088-topsis-2.0.2/README.md | README.md |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix read from a CSV file.

    The first CSV column is treated as an identifier and the first data row
    is skipped (original behaviour — presumably the author expected a second
    header row; confirm against the intended data format).
    """
    def __init__(self, filename):
        # Fix: the original defined '_init_', which Python never calls as a
        # constructor, so Topsis(filename) raised TypeError.
        if os.path.isdir(filename):
            head_tail = os.path.split(filename)
            data = pd.read_csv(head_tail[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        self.d = data.iloc[1:, 1:].values
        self.features = len(self.d[0])
        self.samples = len(self.d)

    def fun(self, a):
        """Sort key: the TOPSIS score (second element of a result row)."""
        return a[1]

    def fun2(self, a):
        """Sort key: the original row index (first element of a result row)."""
        return a[0]

    def evaluate(self, w=None, im=None):
        """Return [[row_index, score, rank], ...] for every data row.

        w  -- per-criterion weights (defaults to all 1s)
        im -- per-criterion impacts, '+' beneficial / '-' cost (defaults to '+')
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w is None:
            w = [1] * features
        if im is None:
            im = ["+"] * features
        ideal_best = []
        ideal_worst = []
        for i in range(0, features):
            # Vector normalisation followed by weighting, done in place.
            k = math.sqrt(sum(d[:, i] * d[:, i]))
            # Fix: start the running extremes from +/- infinity; the original
            # used 0 and 1, which is wrong once weighted values leave [0, 1].
            maxx = float("-inf")
            minn = float("inf")
            for j in range(0, samples):
                d[j, i] = (d[j, i] / k) * w[i]
                if d[j, i] > maxx:
                    maxx = d[j, i]
                if d[j, i] < minn:
                    minn = d[j, i]
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        p = []
        for i in range(0, samples):
            # Euclidean separation from the worst (a) and best (b) ideals.
            a = math.sqrt(sum((d[i] - ideal_worst) * (d[i] - ideal_worst)))
            b = math.sqrt(sum((d[i] - ideal_best) * (d[i] - ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a / (a + b))
            p.append(lst)
        # Rank 1 goes to the highest score; restore row order afterwards.
        p.sort(key=self.fun)
        rank = 1
        for i in range(samples - 1, -1, -1):
            p[i].append(rank)
            rank += 1
        p.sort(key=self.fun2)
        return p
def findTopsis(filename,w,i):
    """Convenience wrapper: run TOPSIS on `filename` and print the result."""
    result = Topsis(filename).evaluate(w, i)
    print(result)
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    args = sys.argv
    if len(args) != 4:
        print("wrong Parameters")
    else:
        weights = [int(tok) for tok in args[2].split(',')]
        impacts = args[3].split(',')
        print(Topsis(args[1]).evaluate(weights, impacts))
if _name_ == '_main_':
main() | 101703088-topsis | /101703088-topsis-2.0.2.tar.gz/101703088-topsis-2.0.2/topsis/topsis.py | topsis.py |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix read from a CSV file.

    The first CSV column is treated as an identifier and the first data row
    is skipped (original behaviour — presumably the author expected a second
    header row; confirm against the intended data format).
    """
    def __init__(self, filename):
        if os.path.isdir(filename):
            head_tail = os.path.split(filename)
            data = pd.read_csv(head_tail[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        self.d = data.iloc[1:, 1:].values
        self.features = len(self.d[0])
        self.samples = len(self.d)

    def fun(self, a):
        """Sort key: the TOPSIS score (second element of a result row)."""
        return a[1]

    def fun2(self, a):
        """Sort key: the original row index (first element of a result row)."""
        return a[0]

    def evaluate(self, w=None, im=None):
        """Return [[row_index, score, rank], ...] for every data row.

        w  -- per-criterion weights (defaults to all 1s)
        im -- per-criterion impacts, '+' beneficial / '-' cost (defaults to '+')
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w is None:
            w = [1] * features
        if im is None:
            im = ["+"] * features
        ideal_best = []
        ideal_worst = []
        for i in range(0, features):
            # Vector normalisation followed by weighting, done in place.
            k = math.sqrt(sum(d[:, i] * d[:, i]))
            # Fix: start the running extremes from +/- infinity; the original
            # used 0 and 1, which is wrong once weighted values leave [0, 1].
            maxx = float("-inf")
            minn = float("inf")
            for j in range(0, samples):
                d[j, i] = (d[j, i] / k) * w[i]
                if d[j, i] > maxx:
                    maxx = d[j, i]
                if d[j, i] < minn:
                    minn = d[j, i]
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        p = []
        for i in range(0, samples):
            # Euclidean separation from the worst (a) and best (b) ideals.
            a = math.sqrt(sum((d[i] - ideal_worst) * (d[i] - ideal_worst)))
            b = math.sqrt(sum((d[i] - ideal_best) * (d[i] - ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a / (a + b))
            p.append(lst)
        # Rank 1 goes to the highest score; restore row order afterwards.
        p.sort(key=self.fun)
        rank = 1
        for i in range(samples - 1, -1, -1):
            p[i].append(rank)
            rank += 1
        p.sort(key=self.fun2)
        return p
def findTopsis(filename,w,i):
    """Convenience wrapper: run TOPSIS on `filename` and print the result."""
    result = Topsis(filename).evaluate(w, i)
    print(result)
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    args = sys.argv
    if len(args) != 4:
        print("wrong Parameters")
    else:
        weights = [int(tok) for tok in args[2].split(',')]
        impacts = args[3].split(',')
        print(Topsis(args[1]).evaluate(weights, impacts))
if __name__ == '__main__':
main() | 101703196-topsis | /101703196_topsis-1.0.0.tar.gz/101703196_topsis-1.0.0/topsis/topsis.py | topsis.py |
"""n=int(input())
m=int(input())
a=[[int(input()) for x in range(m)] for y in range (n)]
w=[float(input()) for x in range(m)]
need=[int(input()) for x in range(m)]"""
import numpy as np  # fixed typo: "numy" is not a real module and raised ModuleNotFoundError on import
def normalized_matrix(a):
    """Vector-normalise `a` (rows = alternatives, columns = criteria) in place.

    Each entry is divided by the Euclidean norm of its column, i.e.
    sqrt(sum of squares), as TOPSIS requires. Returns the mutated matrix.
    """
    n_rows = len(a)
    n_cols = len(a[0])
    norms = []
    for col in range(n_cols):
        total = 0
        for row in range(n_rows):
            total += a[row][col] * a[row][col]
        # Fix: the original stored the raw sum of squares; the vector norm
        # needs its square root.
        norms.append(total ** 0.5)
    for col in range(n_cols):
        for row in range(n_rows):
            # Fix: the original indexed the per-column norms with the row
            # index (sum1[j]), which divides by the wrong column's norm and
            # raises IndexError whenever rows > columns.
            a[row][col] = a[row][col] / norms[col]
    return a
def setting_weights(a,w):
    """Multiply every column of `a` (in place) by the matching weight in `w`."""
    for row in a:
        for col in range(len(row)):
            row[col] = row[col] * w[col]
    return a
def cal_ideal_post(a,req_class):
    """Return the per-column ideal-best value.

    req_class[i] == 1 marks column i as beneficial (best = max); any other
    value marks it as a cost (best = min). Assumes non-negative data, as the
    original's 0 / 1e9 sentinels did.
    """
    n_rows = len(a)
    n_cols = len(a[0])
    v_positive = []
    for col in range(n_cols):
        # Fix: the original initialised maxi/mini once before the column
        # loop, so the extremes of one column leaked into the next.
        maxi = 0
        mini = 1e9
        for row in range(n_rows):
            if req_class[col] == 1:
                maxi = max(maxi, a[row][col])
            else:
                mini = min(mini, a[row][col])
        if req_class[col] == 1:
            v_positive.append(maxi)
        else:
            v_positive.append(mini)
    return v_positive
def cal_ideal_neg(a,req_class):
    """Return the per-column ideal-worst value.

    For a beneficial column (req_class == 1) the worst value is the column
    minimum; for a cost column it is the maximum. Assumes non-negative data,
    as the original's 0 / 1e9 sentinels did.
    """
    n_rows = len(a)
    n_cols = len(a[0])
    v_negative = []
    for col in range(n_cols):
        # Fix: reset the running extremes for every column (the original
        # carried them across columns).
        maxi = 0
        mini = 1e9
        for row in range(n_rows):
            if req_class[col] == 0:
                maxi = max(maxi, a[row][col])
            else:
                mini = min(mini, a[row][col])
        if req_class[col] == 1:
            v_negative.append(mini)
        else:
            v_negative.append(maxi)
    return v_negative
def separation_positive(a,vg):
    """Euclidean distance of every row of `a` from the ideal-best point `vg`."""
    n_rows = len(a)
    n_cols = len(a[0])
    sg = []
    for row in range(n_rows):
        total = 0
        for col in range(n_cols):
            # Fix: `vg` holds one ideal value per column; the original
            # indexed it with the row index (vg[i]).
            total += (vg[col] - a[row][col]) ** 2
        sg.append(total ** 0.5)
    return sg
def separation_negative(a,vb):
    """Euclidean distance of every row of `a` from the ideal-worst point `vb`."""
    n_rows = len(a)
    n_cols = len(a[0])
    sb = []
    for row in range(n_rows):
        total = 0
        for col in range(n_cols):
            # Fix: `vb` holds one ideal value per column; the original
            # indexed it with the row index (vb[i]).
            total += (vb[col] - a[row][col]) ** 2
        sb.append(total ** 0.5)
    return sb
def relative_closeness(sg,sb):
    """Relative closeness to the ideal: sb[i] / (sg[i] + sb[i]) per row."""
    return [sb[i] / (sg[i] + sb[i]) for i in range(len(sg))]
def final_ranking(p):
    """Overwrite `p` with ranks (1 = highest score) and return it.

    Equal scores receive equal rank (the rank of the last occurrence in
    ascending order), matching the original dict-based scheme.
    """
    n = len(p)
    # Fix: the original did `k = p; k.sort()`, which sorted `p` itself and
    # destroyed the score order before the ranks were assigned.
    ordered = sorted(p)
    rank_of = {}
    for pos in range(n):
        rank_of[ordered[pos]] = n - pos
    for idx in range(n):
        p[idx] = rank_of[p[idx]]
    return p
def topsis(a,w,req_class):
a=normalized_matrix(a)
a=setting_weights(a,w)
vg=cal_ideal_post(a,req_class)
vb=cal_ideal_neg(a,req_class)
sg=separation_positive(a,vg)
sb=separation_negative(a,vb)
p=relative_closeness(sg,sb)
ranking=final_ranking(p)
return ranking | 101703214-assign1-UCS633 | /101703214_assign1_UCS633-0.1.tar.gz/101703214_assign1_UCS633-0.1/101703214_assign1_UCS633/assign1.py | assign1.py |
import sys
import numpy as np
import pandas as pd
import os
import csv
from sklearn.linear_model import LinearRegression
def helper():
    """Impute missing Age and Embarked values in the CSV named by sys.argv[1].

    Encodes the categorical columns numerically, drops identifier/free-text
    columns, predicts each missing value with a linear regression fitted on
    the fully-populated columns, restores the categorical encodings, and
    writes the result to ./Missing.csv. Returns 1 on success.

    NOTE(review): column names (Sex, Embarked, Cabin, ...) assume a
    Titanic-style dataset — confirm against the expected input schema.
    """
    path_file = os.getcwd() + '/' + sys.argv[1]
    data = pd.read_csv(path_file)
    # Encode the categorical columns numerically so they can feed a regressor;
    # unknown values become NaN and are imputed below.
    def f1(s):
        if s == "male":
            return 0
        elif s == "female":
            return 1
        else:
            return np.nan
    def f2(s):
        if s == "S":
            return 0
        elif s == "Q":
            return 1
        elif s == "C":
            return 2
        else:
            return np.nan
    data["Sex_numeric"] = data.Sex.apply(f1)
    data["Embarked_numeric"] = data.Embarked.apply(f2)
    # Drop free-text / identifier columns that cannot be used as features.
    del data["Sex"]
    del data["Embarked"]
    del data["Cabin"]
    del data["PassengerId"]
    del data["Ticket"]
    del data["Name"]
    data2 = data.copy()
    # Split columns into those with nulls (l) and fully-populated ones (nl).
    a = data2.isnull().sum()
    l = data2.isnull().sum()[a > 0].index#Null Columns
    nl = data2.isnull().sum()[a == 0].index#Non Null Columns
    # Fit on rows where Age is present, then predict it for the rest.
    selected_rows = data2.loc[:,"Age"].isnull() == False
    x_train = data2.loc[selected_rows, nl].values
    y_train = data2.loc[selected_rows, "Age"].values
    selected_rows = (selected_rows == False)#This is way of taking negation
    x_test = data2.loc[selected_rows, nl].values
    lr = LinearRegression()
    lr.fit(x_train, y_train)
    data2.loc[selected_rows, "Age"] = lr.predict(x_test)
    print(data2.isnull().sum())
    # Repeat the same impute-by-regression step for Embarked_numeric.
    a = data2.isnull().sum()
    l = data2.isnull().sum()[a > 0].index
    nl = data2.isnull().sum()[a == 0].index
    selected_rows = data2.loc[:, "Embarked_numeric"].isnull() == False
    x_train = data2.loc[selected_rows,nl].values
    y_train = data2.loc[selected_rows, "Embarked_numeric"].values
    selected_rows = (selected_rows == False)
    x_test = data2.loc[selected_rows, nl].values
    lr = LinearRegression()
    lr.fit(x_train, y_train)
    data2.loc[selected_rows,"Embarked_numeric"] = lr.predict(x_test)
    #Undo the operations
    # NOTE(review): f11/f22 compare regression outputs with exact float
    # equality (0.0 / 1.0), so most predicted values fall through to the
    # else branch — confirm this rounding behaviour is intended.
    def f11(s):
        if s == 0.0:
            return "male"
        else:
            return "female"
    def f22(s):
        if s == 0.0:
            return "S"
        elif s == 1.0:
            return "Q"
        else:
            return "C"
    data2["Sex"] = data2.Sex_numeric.apply(f11)
    data2["Embarked"] = data2.Embarked_numeric.apply(f22)
    del data2["Embarked_numeric"]
    del data2["Sex_numeric"]
    final_path = os.getcwd() + '/' + 'Missing.csv'
    data2.to_csv(final_path)
    return 1;
def main():
    """CLI entry point: expects exactly one argument, the input CSV name."""
    if len(sys.argv) != 2:
        print("Operation failed")
        return sys.exit(1)
    if helper() == 1:
        print("Task Complete")
if __name__ == '__main__':
main() | 101703311-Missing-Data | /101703311_Missing_Data-1.0.1-py3-none-any.whl/data/handledata.py | handledata.py |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix read from a CSV file.

    The first CSV column is treated as an identifier and the first data row
    is skipped (original behaviour — presumably the author expected a second
    header row; confirm against the intended data format).
    """
    def __init__(self, filename):
        # Fix: the original defined '_init_', which Python never calls as a
        # constructor, so Topsis(filename) raised TypeError.
        if os.path.isdir(filename):
            head_tail = os.path.split(filename)
            data = pd.read_csv(head_tail[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        self.d = data.iloc[1:, 1:].values
        self.features = len(self.d[0])
        self.samples = len(self.d)

    def fun(self, a):
        """Sort key: the TOPSIS score (second element of a result row)."""
        return a[1]

    def fun2(self, a):
        """Sort key: the original row index (first element of a result row)."""
        return a[0]

    def evaluate(self, w=None, im=None):
        """Return [[row_index, score, rank], ...] for every data row.

        w  -- per-criterion weights (defaults to all 1s)
        im -- per-criterion impacts, '+' beneficial / '-' cost (defaults to '+')
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w is None:
            w = [1] * features
        if im is None:
            im = ["+"] * features
        ideal_best = []
        ideal_worst = []
        for i in range(0, features):
            # Vector normalisation followed by weighting, done in place.
            k = math.sqrt(sum(d[:, i] * d[:, i]))
            # Fix: start the running extremes from +/- infinity; the original
            # used 0 and 1, which is wrong once weighted values leave [0, 1].
            maxx = float("-inf")
            minn = float("inf")
            for j in range(0, samples):
                d[j, i] = (d[j, i] / k) * w[i]
                if d[j, i] > maxx:
                    maxx = d[j, i]
                if d[j, i] < minn:
                    minn = d[j, i]
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        p = []
        for i in range(0, samples):
            # Euclidean separation from the worst (a) and best (b) ideals.
            a = math.sqrt(sum((d[i] - ideal_worst) * (d[i] - ideal_worst)))
            b = math.sqrt(sum((d[i] - ideal_best) * (d[i] - ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a / (a + b))
            p.append(lst)
        # Rank 1 goes to the highest score; restore row order afterwards.
        p.sort(key=self.fun)
        rank = 1
        for i in range(samples - 1, -1, -1):
            p[i].append(rank)
            rank += 1
        p.sort(key=self.fun2)
        return p
def findTopsis(filename,w,i):
    """Convenience wrapper: run TOPSIS on `filename` and print the result."""
    result = Topsis(filename).evaluate(w, i)
    print(result)
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    args = sys.argv
    if len(args) != 4:
        print("wrong Parameters")
    else:
        weights = [int(tok) for tok in args[2].split(',')]
        impacts = args[3].split(',')
        print(Topsis(args[1]).evaluate(weights, impacts))
if _name_ == '_main_':
main() | 101703373-topsis | /101703373_topsis-1.0.0-py3-none-any.whl/topsis/101703373-topsis.py | 101703373-topsis.py |
import pandas as pd
import sys
import numpy as np
def main():
    """CLI entry point: topsis <csv> <weights "w1,w2,..."> <impacts "+,-,...">."""
    raw = pd.read_csv(sys.argv[1]).values          # full decision matrix
    weight_list = [int(tok) for tok in sys.argv[2].split(',')]
    impact_list = sys.argv[3].split(',')
    topsis(raw, weight_list, impact_list)
#dataset = [[250,16,12,5],[200,16,8,3],[300,32,16,4],[275,32,8,4],[225,16,16,2]]
#output = pd.DataFrame(dataset)
#w = [.25,.25,.25,.25]
#beni = ['-','+','+','+']
def topsis(dataset,weights,benificiary):
    """Rank the rows of `dataset` with TOPSIS.

    dataset     -- 2-D array-like of criterion values (every column is
                   normalised/weighted, so callers must pass numeric data;
                   NOTE(review): no identifier column is stripped here —
                   confirm the caller removes non-numeric columns first).
    weights     -- one weight per column.
    benificiary -- one '+'/'-' flag per column ('+' = beneficial criterion).

    Returns (and prints) a DataFrame of the original values plus
    'performance score' and 'rank' columns.
    """
    #importing libraries
    import math
    output=pd.DataFrame(dataset)
    a = (output.shape)
    rows = a[0]
    columns = a[1]
    # Vector normalisation: divide each column by the root of its sum of squares.
    dataset=np.array(dataset).astype('float32')
    for i in range(0,columns):
        Fsum=0
        for j in range(0,rows):
            Fsum += dataset[j][i]*dataset[j][i]
        Fsum = math.sqrt(Fsum)
        for j in range(0,rows):
            dataset[j][i] = dataset[j][i]/Fsum
    # Multiply each column by its weight.
    for x in range(0,columns):
        for y in range(0,rows):
            dataset[y][x] *= weights[x]
    # Ideal best (vPlus) and worst (vMinus) per column, chosen by impact sign.
    vPlus = []
    vMinus = []
    # NOTE(review): the sentinels below assume every weighted normalised
    # value lies in (-1, 100) — confirm for large weights/negative data.
    def findMin(x,rows):
        m = 100
        for i in range(0,rows):
            if(dataset[i][x]<m):
                m=dataset[i][x]
        return m
    def findMax(x,rows):
        m = -1
        for i in range(0,rows):
            if(dataset[i][x]>m):
                m=dataset[i][x]
        return m
    for x in range(0,columns):
        if(benificiary[x]=='+'):
            vPlus.append(findMax(x,rows))
            vMinus.append(findMin(x,rows))
        else:
            vPlus.append(findMin(x,rows))
            vMinus.append(findMax(x,rows))
    # Euclidean separation from the ideals, folded into the closeness score
    # S- / (S+ + S-) per row.
    def svalue(a,b):
        sub = a-b
        ans = sub**2
        return ans
    p = []
    for i in range(0,rows):
        sum1 = 0
        sum2 = 0
        for j in range(0,columns):
            sum1 = sum1+svalue(dataset[i][j],vPlus[j])
            sum2 = sum2+svalue(dataset[i][j],vMinus[j])
        sum1 = math.sqrt(sum1)
        sum2 = math.sqrt(sum2)
        p.append(sum2/(sum1+sum2))
    output['performance score'] = p
    # Assign rank 1 to the best score by repeatedly extracting the maximum.
    rank = [0 for x in range(rows)]
    count=1
    q = p.copy()
    for i in range(0,rows):
        maxpos = q.index(max(q))
        rank[maxpos] = count
        count=count+1
        q[maxpos]=-1
    output['rank'] = rank
    print(output)
    return output
if __name__=="__main__":
main() | 101703476-samiksha | /101703476_samiksha-0.1.tar.gz/101703476_samiksha-0.1/101703476_samiksha/topsis.py | topsis.py |
# Filling Missing Values
Missing Data can occur when no information is provided for one or more items or for a whole unit. Missing Data is a very big problem in real life scenario. Missing Data can also refer to as `NA`(Not Available) values in pandas. In DataFrame sometimes many datasets simply arrive with missing data, either because it exists and was not collected or it never existed.
In this package, the missing values in a csv file are filled using the fillna function in pandas. For this the statistical model of mean is used.
## Usage
$ python3 missing.py filename
| 101703573-Missing-pkg-suruchipundir | /101703573_Missing-pkg-suruchipundir-0.0.1.tar.gz/101703573_Missing-pkg-suruchipundir-0.0.1/README.md | README.md |
import sys
import pandas as pd
import numpy as np
def normalized_matrix(filename):
    """Load `filename` (CSV) and vector-normalise every criterion column.

    The first column is assumed to be an identifier and is dropped. Each
    remaining entry is divided by the root of its column's sum of squares.
    Exits if the file has fewer than 3 columns; other errors are printed
    and None is returned.
    """
    try:
        frame = pd.read_csv(filename)
        if len(frame.axes[1]) < 3:
            print("Number of columns should be greater than 3")
            sys.exit(1)
        matrix = frame.iloc[:, 1:].values
        norms = [0] * len(matrix[0])
        for row in range(len(matrix)):
            for col in range(len(matrix[row])):
                norms[col] += np.square(matrix[row][col])
        norms = [np.sqrt(total) for total in norms]
        for row in range(len(matrix)):
            for col in range(len(matrix[row])):
                matrix[row][col] = matrix[row][col] / norms[col]
        return matrix
    except Exception as e:
        print(e)
def weighted_matrix(attributes,weights):
    """Scale every column of `attributes` by the matching weight.

    `weights` is a comma-separated string such as "1,2,0.5". Returns a new
    row-major list of lists; on any error the exception is printed and None
    is returned (matching this module's error-handling convention).
    """
    try:
        parsed = [float(tok) for tok in weights.split(',')]
        return [[value * parsed[col] for col, value in enumerate(row)]
                for row in attributes]
    except Exception as e:
        print(e)
def impact_matrix(weighted_attributes,impacts):
    """Compute TOPSIS performance scores from a weighted, normalised matrix.

    `impacts` is a comma-separated string of '+'/'-' flags, one per column:
    '+' marks a beneficial criterion (higher is better), '-' a cost.
    Returns one score per row; errors are printed and None is returned.
    """
    try:
        flags = impacts.split(',')
        n_rows = len(weighted_attributes)
        n_cols = len(weighted_attributes[0])
        # Per-column ideal best/worst values, direction chosen by the flag.
        best = []
        worst = []
        for col in range(n_cols):
            column = [weighted_attributes[row][col] for row in range(n_rows)]
            if flags[col] == '+':
                best.append(max(column))
                worst.append(min(column))
            elif flags[col] == '-':
                best.append(min(column))
                worst.append(max(column))
            else:
                # Unrecognised flag: keep the first row's value, as before.
                best.append(column[0])
                worst.append(column[0])
        # Euclidean separation of each row from the ideal best and worst,
        # folded straight into the closeness score S- / (S- + S+).
        scores = []
        for row in range(n_rows):
            sep_best = 0
            sep_worst = 0
            for col in range(n_cols):
                sep_best += np.square(weighted_attributes[row][col] - best[col])
                sep_worst += np.square(weighted_attributes[row][col] - worst[col])
            sep_best = np.sqrt(sep_best)
            sep_worst = np.sqrt(sep_worst)
            scores.append(sep_worst / (sep_worst + sep_best))
        return scores
    except Exception as e:
        print(e)
def rank(filename,weights,impacts,resultfilename):
try:
a = normalized_matrix(filename)
c = weighted_matrix(a,weights)
d = impact_matrix(c,impacts)
dataset = pd.read_csv(filename)
dataset['topsis score']=""
dataset['topsis score']=d
copi=d.copy()
copi.sort(reverse=True)
Rank=[]
for i in range(0,len(d)):
temp=d[i]
for j in range(0,len(copi)):
if temp==copi[j]:
Rank.append(j+1)
break
dataset['Rank']=""
dataset['Rank']=Rank
dataset.to_csv(resultfilename,index=False)
except Exception as e:
print(e) | 101903683-kunal-topsis | /101903683_kunal_topsis-v1.2.tar.gz/101903683_kunal_topsis-v1.2/101903683_kunal_topsis/topsis.py | topsis.py |
import numpy as np
import pandas as pd
import sys
def create_matrix(matrix):
    """Drop the identifier column: return every column of `matrix` except the first."""
    return matrix[:, 1:]
def normalize(matrix,weight):
    """Vector-normalise each column of `matrix` in place, then apply `weight`.

    Every column is divided by the root of its sum of squares before the
    weighted matrix is produced via normailze_matrix().
    """
    n_rows, n_cols = matrix.shape
    for col in range(n_cols):
        acc = 0.0
        for row in range(n_rows):
            acc += matrix[row][col] * matrix[row][col]
        matrix[:, col:col + 1] = matrix[:, col:col + 1] / np.sqrt(acc)
    return normailze_matrix(matrix, weight=np.asarray(weight))
def normailze_matrix( matrix,weight):
    """Scale each column of `matrix` by its weight, with weights normalised to sum to 1."""
    fraction = weight / np.sum(weight)
    return fraction * matrix
def cases(normailze_matrix,is_max_the_most_desired):
    """Pick per-column ideal best/worst values according to the impact flags.

    A flag of 1 means the column is beneficial (max is best); anything else
    means it is a cost (min is best). The ideals are handed to Euclidean()
    and the resulting performance scores are returned.
    """
    n_cols = normailze_matrix.shape[1]
    ideal_best = np.zeros(n_cols)
    ideal_worst = np.zeros(n_cols)
    for col in range(n_cols):
        column = normailze_matrix[:, col]
        if is_max_the_most_desired[col] == 1:
            ideal_best[col] = np.max(column)
            ideal_worst[col] = np.min(column)
        else:
            ideal_best[col] = np.min(column)
            ideal_worst[col] = np.max(column)
    return Euclidean(normailze_matrix, ideal_best, ideal_worst)
def Euclidean(matrix, ideal_best,ideal_worst):
    """Per-row Euclidean distance to the ideal-best and ideal-worst points,
    passed on to performance_score()."""
    n_rows, n_cols = matrix.shape
    dist_best = np.zeros(n_rows)
    dist_worst = np.zeros(n_rows)
    for row in range(n_rows):
        acc_best = 0
        acc_worst = 0
        for col in range(n_cols):
            acc_best += (matrix[row][col] - ideal_best[col]) ** 2
            acc_worst += (matrix[row][col] - ideal_worst[col]) ** 2
        dist_best[row] = np.sqrt(acc_best)
        dist_worst[row] = np.sqrt(acc_worst)
    return performance_score(matrix, dist_best, dist_worst)
def performance_score(matrix,euclidean_best,euclidean_worst):
    """Relative closeness to the ideal: S- / (S+ + S-) for every row of `matrix`."""
    n_rows = matrix.shape[0]
    scores = np.zeros(n_rows)
    for idx in range(n_rows):
        scores[idx] = euclidean_worst[idx] / (euclidean_best[idx] + euclidean_worst[idx])
    return scores
def topsis():
    """Command-line driver: read <input.csv> <weights> <impacts> <output.csv>
    from sys.argv, rank the rows with TOPSIS and write Score/Rank columns to
    the output file.
    """
    try:
        filename=sys.argv[1]
    except:
        print('please provide 4 arguements as inputData.csv weights impacts outputFile.csv')
        sys.exit(1)
    try:
        weight_input = sys.argv[2]
    except:
        print('please provide 3 more arguement')
        sys.exit(1)
    try:
        impacts = sys.argv[3]
    except:
        print('please provide 2 more arguement')
        sys.exit(1)
    # NOTE(review): this block re-reads sys.argv[3] instead of checking
    # sys.argv[4]; the argument count is re-validated further down.
    try:
        impacts = sys.argv[3]
    except:
        print('please provide 1 more arguement')
        sys.exit(1)
    try:
        df = pd.read_csv(filename)
    except:
        # NOTE(review): execution continues after this print, so a missing
        # file leads to a NameError on `df` below.
        print('Could not read the file given by you')
    number_columns=len(df.columns)
    if number_columns<3:
        raise Exception("Less Col")
    if len(sys.argv)!=5:
        raise Exception("WrongInput")
    if df.isnull().sum().sum()>0:
        raise Exception("Blank")
    outputFileName = sys.argv[4]
    matrix = df.values
    original_matrix=matrix
    # Translate the '+'/'-' impact flags into 1/0 for cases().
    try:
        impacts_1=list(e for e in impacts.split(','))
        impact_final =[]
        for i in impacts_1 :
            if(i=='+'):
                impact_final.append(1)
            elif(i=='-'):
                impact_final.append(0)
            else:
                raise Exception('Impacts must be + or -')
    except:
        print('could not correctly parse correctly impacts arguement ')
    try:
        weights=list(float(w) for w in weight_input.split(','))
    except:
        print(" could not correctly parse weigths argument")
    # Drop the identifier column, normalise + weight, then score.
    matrix=create_matrix(matrix)
    normailze_matrix=normalize(matrix,weights)
    performance=cases(normailze_matrix,np.asarray(impact_final))
    l = list(performance)
    # Rank 1 = best score; ties share the rank of their first occurrence.
    rank = [sorted(l, reverse=True).index(x) for x in l]
    df['Score'] = performance
    df['Rank'] = rank
    df['Rank'] = df['Rank'] + 1
    df.to_csv(outputFileName)
topsis() | 101903697-Topsis-code | /101903697_Topsis_code-0.0.1-py3-none-any.whl/New folder/101903697.py.py | 101903697.py.py |
import numpy as np
import pandas as pd
import sys
def create_matrix(matrix):
    """Drop the identifier column: return every column of `matrix` except the first."""
    return matrix[:, 1:]
def normalize(matrix,weight):
    """Vector-normalise each column of `matrix` in place, then apply `weight`.

    Every column is divided by the root of its sum of squares before the
    weighted matrix is produced via normailze_matrix().
    """
    n_rows, n_cols = matrix.shape
    for col in range(n_cols):
        acc = 0.0
        for row in range(n_rows):
            acc += matrix[row][col] * matrix[row][col]
        matrix[:, col:col + 1] = matrix[:, col:col + 1] / np.sqrt(acc)
    return normailze_matrix(matrix, weight=np.asarray(weight))
def normailze_matrix( matrix,weight):
    """Scale each column of `matrix` by its weight, with weights normalised to sum to 1."""
    fraction = weight / np.sum(weight)
    return fraction * matrix
def cases(normailze_matrix,is_max_the_most_desired):
    """Pick per-column ideal best/worst values according to the impact flags.

    A flag of 1 means the column is beneficial (max is best); anything else
    means it is a cost (min is best). The ideals are handed to Euclidean()
    and the resulting performance scores are returned.
    """
    n_cols = normailze_matrix.shape[1]
    ideal_best = np.zeros(n_cols)
    ideal_worst = np.zeros(n_cols)
    for col in range(n_cols):
        column = normailze_matrix[:, col]
        if is_max_the_most_desired[col] == 1:
            ideal_best[col] = np.max(column)
            ideal_worst[col] = np.min(column)
        else:
            ideal_best[col] = np.min(column)
            ideal_worst[col] = np.max(column)
    return Euclidean(normailze_matrix, ideal_best, ideal_worst)
def Euclidean(matrix, ideal_best,ideal_worst):
    """Per-row Euclidean distance to the ideal-best and ideal-worst points,
    passed on to performance_score()."""
    n_rows, n_cols = matrix.shape
    dist_best = np.zeros(n_rows)
    dist_worst = np.zeros(n_rows)
    for row in range(n_rows):
        acc_best = 0
        acc_worst = 0
        for col in range(n_cols):
            acc_best += (matrix[row][col] - ideal_best[col]) ** 2
            acc_worst += (matrix[row][col] - ideal_worst[col]) ** 2
        dist_best[row] = np.sqrt(acc_best)
        dist_worst[row] = np.sqrt(acc_worst)
    return performance_score(matrix, dist_best, dist_worst)
def performance_score(matrix,euclidean_best,euclidean_worst):
    """Relative closeness to the ideal: S- / (S+ + S-) for every row of `matrix`."""
    n_rows = matrix.shape[0]
    scores = np.zeros(n_rows)
    for idx in range(n_rows):
        scores[idx] = euclidean_worst[idx] / (euclidean_best[idx] + euclidean_worst[idx])
    return scores
def topsis():
    """Command-line driver: read <input.csv> <weights> <impacts> <output.csv>
    from sys.argv, rank the rows with TOPSIS and write Score/Rank columns to
    the output file.
    """
    try:
        filename=sys.argv[1]
    except:
        print('please provide 4 arguements as inputData.csv weights impacts outputFile.csv')
        sys.exit(1)
    try:
        weight_input = sys.argv[2]
    except:
        print('please provide 3 more arguement')
        sys.exit(1)
    try:
        impacts = sys.argv[3]
    except:
        print('please provide 2 more arguement')
        sys.exit(1)
    # NOTE(review): this block re-reads sys.argv[3] instead of checking
    # sys.argv[4]; the argument count is re-validated further down.
    try:
        impacts = sys.argv[3]
    except:
        print('please provide 1 more arguement')
        sys.exit(1)
    try:
        df = pd.read_csv(filename)
    except:
        # NOTE(review): execution continues after this print, so a missing
        # file leads to a NameError on `df` below.
        print('Could not read the file given by you')
    number_columns=len(df.columns)
    if number_columns<3:
        raise Exception("Less Col")
    if len(sys.argv)!=5:
        raise Exception("WrongInput")
    if df.isnull().sum().sum()>0:
        raise Exception("Blank")
    outputFileName = sys.argv[4]
    matrix = df.values
    original_matrix=matrix
    # Translate the '+'/'-' impact flags into 1/0 for cases().
    try:
        impacts_1=list(e for e in impacts.split(','))
        impact_final =[]
        for i in impacts_1 :
            if(i=='+'):
                impact_final.append(1)
            elif(i=='-'):
                impact_final.append(0)
            else:
                raise Exception('Impacts must be + or -')
    except:
        print('could not correctly parse correctly impacts arguement ')
    try:
        weights=list(float(w) for w in weight_input.split(','))
    except:
        print(" could not correctly parse weigths argument")
    # Drop the identifier column, normalise + weight, then score.
    matrix=create_matrix(matrix)
    normailze_matrix=normalize(matrix,weights)
    performance=cases(normailze_matrix,np.asarray(impact_final))
    l = list(performance)
    # Rank 1 = best score; ties share the rank of their first occurrence.
    rank = [sorted(l, reverse=True).index(x) for x in l]
    df['Score'] = performance
    df['Rank'] = rank
    df['Rank'] = df['Rank'] + 1
    df.to_csv(outputFileName)
topsis() | 101903700-Topsis-code | /101903700-Topsis-code-0.0.1.tar.gz/101903700-Topsis-code-0.0.1/code_topsis/101903700.py | 101903700.py |
import sys
import pandas as pd
import math
import copy
n = len(sys.argv)
if n == 5:
if sys.argv[1] == "file name":
try:
top = pd.read_csv(sys.argv[1])
finl = copy.deepcopy(top)
except:
print('Error! File not Found')
sys.exit()
if top.shape[1] >= 3:
for col in top.columns[1:]:
try:
pd.to_numeric(top[col])
except:
print("Error! Not all the columns after 2nd are numeric")
we = list(sys.argv[2].split(','))
I = list(sys.argv[3].split(','))
w = []
for i in we:
w.append(float(i))
if top.shape[1]-1 == len(w) and top.shape[1]-1 == len(I):
list1 = []
for col in top.columns[1:]:
num = 0
for row in top[col]:
num = num + row * row
list1.append(num)
k = 1
for i in range(top.shape[0]):
for j in range(1, top.shape[1]):
top.iloc[i, j] = top.iloc[i, j] / list1[j - 1]
for i in range(top.shape[0]):
for j in range(1, top.shape[1]):
top.iloc[i, j] = top.iloc[i, j] * w[j - 1]
best = []
worst = []
k = 0
for col in top.columns[1:]:
if I[k] == '-':
best.append(top[col].min())
worst.append(top[col].max())
else:
best.append(top[col].max())
worst.append(top[col].min())
k = k + 1
E_best = []
E_worst = []
for i in range(top.shape[0]):
sq_best = 0
sq_worst = 0
diff = 0
diff_best = 0
diff_worst = 0
for j in range(1, top.shape[1]):
diff = top.iloc[i, j] - best[j-1]
diff_best = diff * diff
diff = top.iloc[i, j] - worst[j - 1]
diff_worst = diff * diff
sq_best = sq_best + diff_best
sq_worst = sq_worst + diff_worst
E_best.append(math.sqrt(sq_best))
E_worst.append(math.sqrt(sq_worst))
P_score = []
for i in range(top.shape[0]):
P_score.append(E_worst[i] / (E_worst[i] + E_best[i]))
finl['Topsis Score'] = P_score
finl['Rank'] = finl['Topsis Score'].rank(ascending=False)
finl.to_csv(sys.argv[4])
print("Output file successfully created.")
else:
print("Error! Impacts and weights must be separated by ‘,’ (comma).")
sys.exit()
else:
print("Error! Input file must have more than 3 columns.")
sys.exit()
else:
print("Error! File not found")
sys.exit()
else:
print("Error! Arguments passed are either more or less than 4.") | 101903751-topsis | /101903751-topsis-.tar.gz/101903751-topsis-1.0.0/project/__main__.py | __main__.py |
# 102003037 TOPSIS PACKAGE HIMANGI SHARMA
Roll Number : 102003037 <br>
Subgroup : 3COE18 <br>
The program takes csv file containing our data to be ranked, weights and impacts in the form of "+" or "-", seperated by commas as inputs and then outputs a resultant csv file with two additional columns of performance score and Ranks.
# What is TOPSIS
TOPSIS, the Technique for Order Preference by Similarity to Ideal Solution, is a multi-criteria decision analysis method (MCDA). <br>
It chooses the alternative with the shortest Euclidean distance from the ideal solution and the greatest distance from the negative ideal solution. <br>
## Installation
### How to install the TOPSIS package <br>
using pip install:-<br>
``` pip install 102003037-topsis-Himangi ```
## For Calculating the TOPSIS Score
Open terminal and type <br>
``` 102003037 102003037-data.csv "1,1,1,1" "+,+,-,+" 102003037-output.csv ```
The output will then be saved in a newly created CSV file whose name will be provided in the command line by the user.
## Input File [102003037-data.csv]:
Topsis mathematical operations to be performed on the input file which contains a dataset having different fields.
## Weights ["1,1,1,1"]
The weights to assigned to the different parameters in the dataset should be passed in the argument, seperated by commas.
## Impacts ["+,+,-,+"]:
The impacts are passed to consider which parameters have a positive impact on the decision and which one have the negative impact. Only '+' and '-' values should be passed and should be seperated with ',' only.
## Output File [102003037-output.csv]:
This argument is used to pass the path of the result file where we want the rank and performance score to be stored.
| 102003037-topsis | /102003037-topsis-0.0.1.tar.gz/102003037-topsis-0.0.1/README.md | README.md |
import sys
import os
import pandas as pd
from datetime import datetime
from math import sqrt
import numpy as np
def sorter(x):
    """Sort key: the second element of *x*, coerced to an int."""
    second = x[1]
    return int(second)
class Error(Exception):
    """Base class for all TOPSIS command-line validation errors."""
    pass
class MoreArgumentsError(Error):
    """More than four command-line arguments were supplied."""
    pass
class NoArgumentsError(Error):
    """Fewer than four command-line arguments were supplied."""
    pass
class InvalidDataError(Error):
    """The input file has fewer than three columns."""
    pass
class NumericError(Error):
    """A criterion column contains non-numeric data."""
    pass
class ImpactsError(Error):
    """The number of impacts does not match the number of criteria."""
    pass
class WeightsError(Error):
    """The number of weights does not match the number of criteria."""
    pass
class ImpactsTypeError(Error):
    """An impact value is something other than '+' or '-'."""
    pass
class CommaError(Error):
    """Weights/impacts strings are not properly comma separated."""
    pass
def TOPSIS():
    """This function returns a file by the name specified by the user in the command line arguments which contains the TOPSIS score as well as
    rank for the different records being compared.
    Usage:
    1) Create a script by importing the package and just calling the TOPSIS function.
        import importlib
        topsis=importlib.import_module("Topsis-Inaayat-102003050")
        topsis.TOPSIS()
    2) Run the script from terminal with command line arguments:
        C:/Users/admin> python myscript.py <Data_File_csv> <Weights(Comma_seperated)> <Impacts(Comma_seperated)> <Result_file_csv>
    """
    # argv layout: [script, data_csv, weights_string, impacts_string, result_csv]
    args=sys.argv
    try:
        if len(args)<5:
            raise(NoArgumentsError)
        elif len(args)>5:
            raise(MoreArgumentsError)
        df=pd.read_csv(args[1])
        if len(list(df.columns))<3:
            raise(InvalidDataError)
        # 'd' keeps an untouched copy of the input for the output file.
        d=pd.read_csv(args[1])
        # Every criterion column (all but the first) must be numeric.
        for i in df.columns[1:]:
            if not(np.issubdtype(df[i].dtype, np.number)):
                raise(NumericError)
        # Euclidean norm of each criterion column (vector-normalisation denominator).
        sums=[np.sum(df.iloc[:,i].values**2) for i in range(1,len(df.columns))]
        sums=[i**0.5 for i in sums]
        sums=np.array(sums)
        # n-1 criteria require exactly n-2 commas in the weights/impacts strings.
        if(args[2].count(",")!=len(df.columns)-2 or args[3].count(",")!=len(df.columns)-2):
            raise(CommaError)
        weights=[ int(i) for i in args[2].split(",")]
        impacts=args[3].split(",")
        for i in impacts:
            if( i!="+" and i!="-"):
                print((i))
                raise(ImpactsTypeError)
        if(len(impacts)!=len(df.columns)-1):
            raise(ImpactsError)
        if(len(weights)!=len(df.columns)-1):
            raise(WeightsError)
        # Row-wise vector normalisation followed by weighting.
        for i in range(len(df)):
            df.iloc[i,1:]=(df.iloc[i,1:]/sums)*weights
        # Ideal best / ideal worst value per criterion; '+' means larger is
        # better, '-' means smaller is better.
        ibest=[]
        iworst=[]
        #print(df)
        for i in range(1,len(df.columns)):
            if impacts[i-1]=="+":
                ibest.append(max(df[df.columns[i]].values))
                iworst.append(min(df[df.columns[i]].values))
            elif impacts[i-1]=="-":
                iworst.append(max(df[df.columns[i]].values))
                ibest.append(min(df[df.columns[i]].values))
        #print(ibest,iworst)
        ibest=np.array(ibest)
        iworst=np.array(iworst)
        # Euclidean distance of each row to the ideal best/worst points.
        disbest=[sqrt(np.sum(np.square(ibest-df.iloc[i,1:].values))) for i in range(len(df))]
        disworst=[sqrt(np.sum(np.square(iworst-df.iloc[i,1:].values))) for i in range(len(df))]
        # Relative closeness: higher means closer to the ideal solution.
        topsis=[disworst[i]/(disworst[i]+disbest[i]) for i in range(len(disbest))]
        d["TOPSIS"]=topsis
        d["Rank"]=d["TOPSIS"].rank(method="max",ascending=False)
        d.to_csv(args[4],index=False)
    except FileNotFoundError:
        print("[",datetime.now(),"]","File Not Found: Cannot find the file",args[1],"at specified path")
    except MoreArgumentsError:
        print("[",datetime.now(),"]","Too Many Arguments Supplied for Runtime")
    except NoArgumentsError:
        print("[",datetime.now(),"]","Insufficient Arguments Supplied for Runtime")
    except InvalidDataError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid structure(More Columns Required)")
    except NumericError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid structure( 2nd to last columns must be numeric)")
    except CommaError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid imput(Impacts and Weights must be seperated by comma)")
    except ImpactsTypeError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid imput(Impacts must be either + or -)")
    except ImpactsError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid imput(Impacts are not equal to features)")
    except WeightsError:
        print("[",datetime.now(),"]","File",args[1],"cannot be processed due to invalid imput(Weights are not equal to features)")
import pandas as pd
import sys
import math
# read_file.to_csv("102003712-data.csv",
# index = None,
# header=True)
def main():
    """CLI TOPSIS: python <script> <input.csv> <weights> <impacts> <output.csv>.

    Reads a decision matrix whose first column holds alternative names and
    whose remaining columns are numeric criteria, applies vector
    normalisation with the comma-separated weights and '+'/'-' impacts, and
    writes the input plus 'score' and 'Rank' columns to <output.csv>.
    """
    if len(sys.argv) != 5:
        print('ERROR: Please provide four arguments')
        return
    try:
        df = pd.DataFrame(pd.read_csv(sys.argv[1]))
        data = df.drop(df.columns[0], axis=1)  # criteria columns only
        # float() rather than eval(): never execute command-line input.
        weight = [float(tok) for tok in sys.argv[2].split(",")]
        # '+' (benefit) -> 1, '-' (cost) -> 0; other tokens are dropped and
        # caught by the length check below.
        impact = [1 if tok == '+' else 0
                  for tok in sys.argv[3].split(",") if tok in ('+', '-')]
        rows, cols = data.shape
        if len(weight) != cols or len(impact) != cols:
            print('ERROR: incorrect arguments')
            return
        # Vector normalisation: divide each column by its Euclidean norm,
        # then scale by the criterion's weight.
        for j in range(cols):
            norm = math.sqrt(sum(data.iloc[i, j] ** 2 for i in range(rows)))
            for i in range(rows):
                data.iat[i, j] = (data.iloc[i, j] / norm) * weight[j]
        # Ideal best / worst value of each criterion, direction depending on
        # whether the criterion is a benefit or a cost.
        best, worst = [], []
        for j in range(cols):
            col_max = data.iloc[:, j].max()
            col_min = data.iloc[:, j].min()  # bug fix: was assigned to 'min2', so the minimum was never tracked
            if impact[j] == 1:
                best.append(col_max)
                worst.append(col_min)
            else:
                best.append(col_min)
                worst.append(col_max)
        # Relative closeness to the ideal solution.
        scores = []
        for i in range(rows):
            d_best = math.sqrt(sum((data.iloc[i, j] - best[j]) ** 2
                                   for j in range(cols)))
            d_worst = math.sqrt(sum((data.iloc[i, j] - worst[j]) ** 2
                                    for j in range(cols)))
            scores.append(d_worst / (d_best + d_worst))
        df["score"] = scores
        # Rank 1 is the best alternative, i.e. the highest score.
        df["Rank"] = df["score"].rank(ascending=False)
        df.to_csv(sys.argv[4], index=False)
    except FileNotFoundError:
        print('file not found')
    except Exception:
        print('ERROR')
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__':
    main()
import pandas as pd
import sys
import math
# read_file.to_csv("102003712-data.csv",
# index = None,
# header=True)
def main():
    """CLI TOPSIS: python <script> <input.csv> <weights> <impacts> <output.csv>.

    Reads a decision matrix whose first column holds alternative names and
    whose remaining columns are numeric criteria, applies vector
    normalisation with the comma-separated weights and '+'/'-' impacts, and
    writes the input plus 'score' and 'Rank' columns to <output.csv>.
    """
    if len(sys.argv) != 5:
        print('ERROR: Please provide four arguments')
        return
    try:
        df = pd.DataFrame(pd.read_csv(sys.argv[1]))
        data = df.drop(df.columns[0], axis=1)  # criteria columns only
        # float() rather than eval(): never execute command-line input.
        weight = [float(tok) for tok in sys.argv[2].split(",")]
        # '+' (benefit) -> 1, '-' (cost) -> 0; other tokens are dropped and
        # caught by the length check below.
        impact = [1 if tok == '+' else 0
                  for tok in sys.argv[3].split(",") if tok in ('+', '-')]
        rows, cols = data.shape
        if len(weight) != cols or len(impact) != cols:
            print('ERROR: incorrect arguments')
            return
        # Vector normalisation: divide each column by its Euclidean norm,
        # then scale by the criterion's weight.
        for j in range(cols):
            norm = math.sqrt(sum(data.iloc[i, j] ** 2 for i in range(rows)))
            for i in range(rows):
                data.iat[i, j] = (data.iloc[i, j] / norm) * weight[j]
        # Ideal best / worst value of each criterion, direction depending on
        # whether the criterion is a benefit or a cost.
        best, worst = [], []
        for j in range(cols):
            col_max = data.iloc[:, j].max()
            col_min = data.iloc[:, j].min()  # bug fix: was assigned to 'min2', so the minimum was never tracked
            if impact[j] == 1:
                best.append(col_max)
                worst.append(col_min)
            else:
                best.append(col_min)
                worst.append(col_max)
        # Relative closeness to the ideal solution.
        scores = []
        for i in range(rows):
            d_best = math.sqrt(sum((data.iloc[i, j] - best[j]) ** 2
                                   for j in range(cols)))
            d_worst = math.sqrt(sum((data.iloc[i, j] - worst[j]) ** 2
                                    for j in range(cols)))
            scores.append(d_worst / (d_best + d_worst))
        df["score"] = scores
        # Rank 1 is the best alternative, i.e. the highest score.
        df["Rank"] = df["score"].rank(ascending=False)
        df.to_csv(sys.argv[4], index=False)
    except FileNotFoundError:
        print('file not found')
    except Exception:
        print('ERROR')
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__':
    main()
import pandas as pd
import sys
import math
# author : Sahil Chhabra
# email : [email protected]
def main():
    """CLI TOPSIS: python <script> <input.csv> <weights> <impacts> <output.csv>.

    Reads a decision matrix whose first column holds alternative names and
    whose remaining columns are numeric criteria, applies vector
    normalisation with the comma-separated weights and '+'/'-' impacts, and
    writes the input plus 'score' and 'Rank' columns to <output.csv>.
    """
    # (leftover debug print of sys.argv removed)
    if len(sys.argv) != 5:
        print('ERROR: Please provide four arguments')
        return
    try:
        df = pd.DataFrame(pd.read_csv(sys.argv[1]))
        data = df.drop(df.columns[0], axis=1)  # criteria columns only
        # float() rather than eval(): never execute command-line input.
        weight = [float(tok) for tok in sys.argv[2].split(",")]
        # '+' (benefit) -> 1, '-' (cost) -> 0; other tokens are dropped and
        # caught by the length check below.
        impact = [1 if tok == '+' else 0
                  for tok in sys.argv[3].split(",") if tok in ('+', '-')]
        rows, cols = data.shape
        if len(weight) != cols or len(impact) != cols:
            print('ERROR: incorrect arguments')
            return
        # Vector normalisation: divide each column by its Euclidean norm,
        # then scale by the criterion's weight.
        for j in range(cols):
            norm = math.sqrt(sum(data.iloc[i, j] ** 2 for i in range(rows)))
            for i in range(rows):
                data.iat[i, j] = (data.iloc[i, j] / norm) * weight[j]
        # Ideal best / worst value of each criterion, direction depending on
        # whether the criterion is a benefit or a cost.
        best, worst = [], []
        for j in range(cols):
            col_max = data.iloc[:, j].max()
            col_min = data.iloc[:, j].min()  # bug fix: was assigned to 'min2', so the minimum was never tracked
            if impact[j] == 1:
                best.append(col_max)
                worst.append(col_min)
            else:
                best.append(col_min)
                worst.append(col_max)
        # Relative closeness to the ideal solution.
        scores = []
        for i in range(rows):
            d_best = math.sqrt(sum((data.iloc[i, j] - best[j]) ** 2
                                   for j in range(cols)))
            d_worst = math.sqrt(sum((data.iloc[i, j] - worst[j]) ** 2
                                    for j in range(cols)))
            scores.append(d_worst / (d_best + d_worst))
        df["score"] = scores
        # Rank 1 is the best alternative, i.e. the highest score.
        df["Rank"] = df["score"].rank(ascending=False)
        df.to_csv(sys.argv[4], index=False)
    except FileNotFoundError:
        print('file not found')
    except Exception:
        print('ERROR')
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__':
    main()
# 102017059_Aakanksha_Topsis
This package is implementation of multi-criteria decision analysis using topsis. This package will accept three arguments during file execution:
dataset.csv //file which contains the models and parameters
string of weights separated by commas(,)
string of impacts (+/-) separated by commas(,). <br> Important: install the pandas library before using this package (sys, operator and math are part of the Python standard library). <br> You can install this package using the following command: pip install 102017059_Aakanksha_Topsis
| 102017059-Aakanksha-Topsis | /102017059_Aakanksha_Topsis-0.0.0.tar.gz/102017059_Aakanksha_Topsis-0.0.0/README.md | README.md |
TOPSIS Package
TOPSIS stands for Technique for Order Preference by Similarity to Ideal Solution. It is a method of compensatory aggregation that compares a set of alternatives by identifying weights for each criterion, normalising scores for each criterion and calculating the geometric distance between each alternative and the ideal alternative, which is the best score in each criterion. An assumption of TOPSIS is that the criteria are monotonically increasing or decreasing. In this Python package Vector Normalization has been implemented.
This package has been created based on Project 1 of course UCS633. Tarandeep Singh 102017067
In Command Prompt
>topsis data.csv "1,1,1,1" "+,+,-,+"
| 102017067-topsis | /102017067-topsis-1.0.0.tar.gz/102017067-topsis-1.0.0/README.md | README.md |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix loaded from a CSV file."""
    def __init__(self,filename):
        # Accept either a directory (read the CSV named by the last path
        # component, relative to the CWD) or a direct path to the CSV file.
        # NOTE(review): if *filename* is neither, `data` stays unbound and
        # the assignments below raise NameError — confirm intended.
        if os.path.isdir(filename):
            head_tails = os.path.split(filename)
            data = pd.read_csv(head_tails[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        # Drop the first row and first column (labels); keep the numeric
        # criteria matrix.
        self.d = data.iloc[1:,1:].values
        self.features = len(self.d[0])  # number of criteria (columns)
        self.samples = len(self.d)      # number of alternatives (rows)
    def fun(self,a):
        # Sort key: the TOPSIS score (second element of an [index, score] pair).
        return a[1]
    def fun2(self,a):
        # Sort key: the original row index (first element of the pair).
        return a[0]
    def evaluate(self,w = None,im = None):
        """Return [[row_index, topsis_score, rank], ...] for every row.

        :param w: per-criterion weights; defaults to all ones
        :param im: per-criterion impacts '+'/'-'; defaults to all '+'
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w==None:
            w=[1]*features
        if im==None:
            im=["+"]*features
        ideal_best=[]
        ideal_worst=[]
        for i in range(0,features):
            # Euclidean norm of column i (vector-normalisation denominator).
            k = math.sqrt(sum(d[:,i]*d[:,i]))
            maxx = 0
            # NOTE(review): the initial bounds assume every weighted
            # normalised value lies in [0, 1]; weights larger than 1 would
            # break this — confirm.
            minn = 1
            for j in range(0,samples):
                d[j,i] = (d[j,i]/k)*w[i]
                if d[j,i]>maxx:
                    maxx = d[j,i]
                if d[j,i]<minn:
                    minn = d[j,i]
            # '+' criteria: larger is better; '-' criteria: smaller is better.
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        plt = []
        for i in range(0,samples):
            # Distances to the ideal worst (a) and ideal best (b) points.
            a = math.sqrt(sum((d[i]-ideal_worst)*(d[i]-ideal_worst)))
            b = math.sqrt(sum((d[i]-ideal_best)*(d[i]-ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a/(a+b))  # relative closeness score in [0, 1]
            plt.append(lst)
        # Assign ranks: the highest score receives rank 1.
        plt.sort(key=self.fun)
        rank = 1
        for i in range(samples-1,-1,-1):
            plt[i].append(rank)
            rank+=1
        plt.sort(key=self.fun2)
        return plt
return plt
def findTopsis(filename, w, i):
    """Load *filename*, run TOPSIS with weights *w* and impacts *i*, and print the result."""
    print(Topsis(filename).evaluate(w, i))
def main():
    """CLI entry: <script> <input_csv> <comma_weights> <comma_impacts>.

    Runs TOPSIS on the CSV and writes the [index, score, rank] table to
    'output.csv' in the current directory.
    """
    args = sys.argv
    if len(args) != 4:  # simplified from 'length > 4 or length < 4'
        print("wrong Parameters")
        return
    weights = list(map(int, args[2].split(',')))
    impacts = args[3].split(',')
    result = Topsis(args[1]).evaluate(weights, impacts)
    # Persist the result table; commented-out debug code removed.
    pd.DataFrame(result).to_csv("output.csv")
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__':
    main()
TOPSIS Package
TOPSIS stands for Technique for Order Preference by Similarity to Ideal Solution. It is a method of compensatory aggregation that compares a set of alternatives by identifying weights for each criterion, normalising scores for each criterion and calculating the geometric distance between each alternative and the ideal alternative, which is the best score in each criterion. An assumption of TOPSIS is that the criteria are monotonically increasing or decreasing. In this Python package Vector Normalization has been implemented.
This package has been created based on Assignment 1 of course UCS654. Prince Sharma 102017119
In Command Prompt
>topsis data.csv "1,1,1,1" "+,+,-,+"
| 102017119-topsis | /102017119-topsis-1.0.0.tar.gz/102017119-topsis-1.0.0/README.md | README.md |
import sys
import os
import pandas as pd
import math
import numpy as np
class Topsis:
    """TOPSIS ranking over a decision matrix loaded from a CSV file."""
    def __init__(self,filename):
        # Accept either a directory (read the CSV named by the last path
        # component, relative to the CWD) or a direct path to the CSV file.
        # NOTE(review): if *filename* is neither, `data` stays unbound and
        # the assignments below raise NameError — confirm intended.
        if os.path.isdir(filename):
            head_tails = os.path.split(filename)
            data = pd.read_csv(head_tails[1])
        if os.path.isfile(filename):
            data = pd.read_csv(filename)
        # Drop the first row and first column (labels); keep the numeric
        # criteria matrix.
        self.d = data.iloc[1:,1:].values
        self.features = len(self.d[0])  # number of criteria (columns)
        self.samples = len(self.d)      # number of alternatives (rows)
    def fun(self,a):
        # Sort key: the TOPSIS score (second element of an [index, score] pair).
        return a[1]
    def fun2(self,a):
        # Sort key: the original row index (first element of the pair).
        return a[0]
    def evaluate(self,w = None,im = None):
        """Return [[row_index, topsis_score, rank], ...] for every row.

        :param w: per-criterion weights; defaults to all ones
        :param im: per-criterion impacts '+'/'-'; defaults to all '+'
        """
        d = self.d
        features = self.features
        samples = self.samples
        if w==None:
            w=[1]*features
        if im==None:
            im=["+"]*features
        ideal_best=[]
        ideal_worst=[]
        for i in range(0,features):
            # Euclidean norm of column i (vector-normalisation denominator).
            k = math.sqrt(sum(d[:,i]*d[:,i]))
            maxx = 0
            # NOTE(review): the initial bounds assume every weighted
            # normalised value lies in [0, 1]; weights larger than 1 would
            # break this — confirm.
            minn = 1
            for j in range(0,samples):
                d[j,i] = (d[j,i]/k)*w[i]
                if d[j,i]>maxx:
                    maxx = d[j,i]
                if d[j,i]<minn:
                    minn = d[j,i]
            # '+' criteria: larger is better; '-' criteria: smaller is better.
            if im[i] == "+":
                ideal_best.append(maxx)
                ideal_worst.append(minn)
            else:
                ideal_best.append(minn)
                ideal_worst.append(maxx)
        plt = []
        for i in range(0,samples):
            # Distances to the ideal worst (a) and ideal best (b) points.
            a = math.sqrt(sum((d[i]-ideal_worst)*(d[i]-ideal_worst)))
            b = math.sqrt(sum((d[i]-ideal_best)*(d[i]-ideal_best)))
            lst = []
            lst.append(i)
            lst.append(a/(a+b))  # relative closeness score in [0, 1]
            plt.append(lst)
        # Assign ranks: the highest score receives rank 1.
        plt.sort(key=self.fun)
        rank = 1
        for i in range(samples-1,-1,-1):
            plt[i].append(rank)
            rank+=1
        plt.sort(key=self.fun2)
        return plt
return plt
def findTopsis(filename, w, i):
    """Load *filename*, run TOPSIS with weights *w* and impacts *i*, and print the result."""
    print(Topsis(filename).evaluate(w, i))
def main():
    """CLI entry: <script> <input_csv> <comma_weights> <comma_impacts>.

    Runs TOPSIS on the CSV and writes the [index, score, rank] table to
    'output.csv' in the current directory.
    """
    args = sys.argv
    if len(args) != 4:  # simplified from 'length > 4 or length < 4'
        print("wrong Parameters")
        return
    weights = list(map(int, args[2].split(',')))
    impacts = args[3].split(',')
    result = Topsis(args[1]).evaluate(weights, impacts)
    # Persist the result table; commented-out debug code removed.
    pd.DataFrame(result).to_csv("output.csv")
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__':
    main()
import pandas as pd
import sys
import os
def main() :
    """CLI entry: python <script>.py <input.csv> <weights> <impacts> <result.csv>.

    Validates the arguments and the dataset, then delegates the actual
    ranking to Topsis.topsis().
    """
    if len(sys.argv) != 5 : #for the proper usage
        print("ERROR : NUMBER OF PARAMETERS")
        print("USAGE : python <filename>.py inputfile.csv '1,1,1,1' '+,+,-,+' result.csv ")
        exit(1)
    elif not os.path.isfile(sys.argv[1]): #for file not found
        print(f"ERROR : {sys.argv[1]} Doesn't exist, Please check if you have entered the right file")
        exit(1)
    elif ".csv" != (os.path.splitext(sys.argv[1]))[1]: #for csv format
        print(f"ERROR : Please enter {sys.argv[1]} in csv format")
        exit(1)
    else :
        dataset = pd.read_csv(sys.argv[1])
        ncol = len(dataset.columns.values)
        if ncol < 3 :
            print("ERROR : Minimum Number of Columns should be 3")
            exit(1)
        # Coerce every criterion column to numeric (bug fix: the converted
        # result used to be discarded, leaving the columns unchanged).
        for i in range(1, ncol) :
            dataset.iloc[:, i] = pd.to_numeric(dataset.iloc[:, i], errors='coerce')
            #if there are missing values
            #dataset.iloc[:,i].fillna((dataset[:,i].values.mean()),inplace=True)
        try :
            weights = [int(i) for i in sys.argv[2].split(',')]
        except ValueError :  # narrowed from a bare except
            print('ERROR : Weights array not input properly')
            exit(1)
        #checking impact array: only '+' (benefit) or '-' (cost) allowed
        for i in sys.argv[3].split(',') :
            if i not in ['+','-'] :
                print('Error : Impacts can only be + or -')
                exit(1)
        impact = sys.argv[3].split(',')
        # One weight and one impact per criterion column (bug fix: the
        # impact count was compared against ncol instead of ncol - 1,
        # which rejected every correctly sized input).
        if ncol != len(weights) + 1 or ncol != len(impact) + 1 :
            print("ERROR : The lenghts of either weights or impact doesn't match with the dataset length")
            print('Length of dataset : ',ncol-1,'\n Length of weights :',len(weights),'\nLenght of imapcts :',len(impact))
            exit(1)
        if('.csv' != (os.path.splitext(sys.argv[4]))[1]) :
            print('ERROR : output file should be in csv form')
            exit(1)
        topsis = Topsis()
        topsis.topsis(dataset,weights,impact,ncol)
class Topsis :
    """TOPSIS scoring and ranking of a pandas decision matrix."""
    def __Normalize(self,dataset,nCol,weight) :
        """Vector-normalise each criterion column in place and apply its weight."""
        for i in range(1,nCol) :
            temp = 0
            for j in range(len(dataset)) :
                temp = temp + dataset.iloc[j,i] ** 2 #sum of squares
            temp = temp ** 0.5
            for j in range(len(dataset)) :
                dataset.iat[j,i] = (dataset.iloc[j,i] / temp) * weight[i-1] #adjusting according to weights
        #print(dataset)
        return dataset
    def __ideal_best_worst(self,dataset,ncol,impact) :
        """Return (ideal_best, ideal_worst) per criterion; '-' impacts swap the pair."""
        ideal_best_values = (dataset.max().values)[1:]
        ideal_worst_values = (dataset.min().values)[1:]
        #print(ncol,len(impact))
        for i in range(1,ncol) :
            if impact[i-1] == '-' :
                ideal_best_values[i-1],ideal_worst_values[i-1] = ideal_worst_values[i-1],ideal_best_values[i-1]
        return ideal_best_values, ideal_worst_values
    def topsis(self,dataset,weights,impact,ncol) :
        """Score and rank *dataset*, then write it out as CSV.

        NOTE(review): writes to sys.argv[4], coupling this class to the
        CLI — confirm before reusing as a library.
        """
        #ncol = len(dataset.axes[1])
        dataset = self.__Normalize(dataset,ncol,weights)
        p_sln , n_sln = self.__ideal_best_worst(dataset,ncol,impact)
        score = []
        pp = [] #positive distances
        nn = [] #negative distances
        for i in range(len(dataset)) :
            # Euclidean distances to the ideal best/worst points.
            temp_p,temp_n = 0,0
            for j in range(1,ncol) :
                temp_p += (p_sln[j-1] - dataset.iloc[i,j])**2
                temp_n += (n_sln[j-1] - dataset.iloc[i,j])**2
            temp_p,temp_n = temp_p**0.5,temp_n**0.5
            # Relative closeness: higher means closer to the ideal.
            score.append(temp_n/(temp_p+temp_n))
            nn.append(temp_n)
            pp.append(temp_p)
        # dataset['positive distance'] = pp
        # dataset['negative distance'] = nn
        #print(score)
        dataset['Topsis Score'] = score
        # Rank 1 goes to the highest score.
        dataset['Rank'] = (dataset['Topsis Score'].rank(method = 'max',ascending = False))
        dataset = dataset.astype({"Rank" : int})
        dataset.to_csv(sys.argv[4],index = False)
# Run the command-line tool when this file is executed as a script.
if __name__ == '__main__' :
    main()
import pandas as pd
import numpy as np
import sys
def topsis():
    """CLI TOPSIS: python <script> <input.csv> <weights> <impacts> <output.csv>.

    NOTE(review): the criterion columns are hardcoded as 'P1'..'P5', so
    only inputs with exactly that schema work — confirm intended.
    """
    if len(sys.argv)!=5:
        print("Wrong command line input")
        exit()
    try:
        with open(sys.argv[1], 'r') as filee:
            df=pd.read_csv(filee)
    except FileNotFoundError:
        print("File not found")
        exit()
    # Characters that must not appear in the weights string; the second
    # dictionary additionally permits '-' and '+' is absent from neither,
    # so impacts may contain '+'/'-'.
    punctuation_dictionary = {'.':True,'@': True, '^': True, '!': True, ' ': True, '#': True, '%': True,'$': True, '&': True, ')': True, '(': True, '+': True, '*': True,'-': True, '=': True}
    punctuation_dictionary2 = {'.':True,'@': True, '^': True, '!': True, ' ': True, '#': True, '%': True,'$': True, '&': True, ')': True, '(': True, '*': True, '=': True}
    def char_check(new_list, punct_dict):
        # Returns False on the first forbidden character, else None.
        for item in new_list:
            for char in item:
                if char in punct_dict:
                    return False
    def string_check(comma_check_list, punct_dict):
        # NOTE(review): comma_check_list is a string here, so this iterates
        # its individual characters — confirm intended.
        for string in comma_check_list:
            new_list = string.split(",")
            if char_check(new_list, punct_dict) == False:
                print("Invalid input or Values not comma separated")
                exit()
    string_check(sys.argv[2], punctuation_dictionary)
    string_check(sys.argv[3], punctuation_dictionary2)
    nCol=len(df.columns)
    weights1 = list(sys.argv[2].split(","))
    impacts = list(sys.argv[3].split(","))
    # NOTE(review): eval() on command-line input — consider float() instead.
    weights = [eval(i) for i in weights1]
    if nCol<3:
        print("No of columns are less than 3.")
        exit()
    if len(impacts) != (nCol-1):
        print("No of values in impacts should be same as the number of columns.")
        exit()
    if len(weights) != (nCol-1):
        print("No of values in weights should be same as the number of columns.")
        exit()
    for i in range(len(impacts)):
        if(impacts[i]!="+" and impacts[i]!="-"):
            print("Impacts should be either '+' or '-'.")
            exit()
    # Drop rows whose P1..P5 values are not numeric.
    for index,row in df.iterrows():
        try:
            float(row['P1'])
            float(row['P2'])
            float(row['P3'])
            float(row['P4'])
            float(row['P5'])
        except:
            df.drop(index,inplace=True)
    df["P1"] = pd.to_numeric(df["P1"], downcast="float")
    df["P2"] = pd.to_numeric(df["P2"], downcast="float")
    df["P3"] = pd.to_numeric(df["P3"], downcast="float")
    df["P4"] = pd.to_numeric(df["P4"], downcast="float")
    df["P5"] = pd.to_numeric(df["P5"], downcast="float")
    df = df.copy(deep=True)
    # Vector normalisation followed by weighting, column by column.
    for i in range(1,nCol):
        temp=0
        for j in range(len(df)):
            temp=temp+df.iloc[j,i]**2
        temp=temp**0.5
        for j in range(len(df)):
            df.iat[j, i] = (df.iloc[j, i] / temp)*weights[i-1]
    # Ideal best/worst per criterion; '-' impacts swap the pair.
    ideal_best=(df.max().values)[1:]
    ideal_worst=(df.min().values)[1:]
    for i in range(1,nCol):
        if(impacts[i-1]=='-'):
            ideal_best[i-1],ideal_worst[i-1]=ideal_worst[i-1],ideal_best[i-1]
    # Euclidean distances and the relative-closeness score.
    score=[]
    distance_positive=[]
    distance_negative=[]
    for i in range(len(df)):
        temp_p,temp_n=0,0
        for j in range(1,nCol):
            temp_p=temp_p + (ideal_best[j-1]-df.iloc[i,j])**2
            temp_n=temp_n + (ideal_worst[j-1]-df.iloc[i,j])**2
        temp_p=temp_p**0.5
        temp_n=temp_n**0.5
        score.append(temp_n/(temp_p + temp_n))
        distance_negative.append(temp_n)
        distance_positive.append(temp_p)
    df['distance negative']=distance_negative
    df['distance positive']=distance_positive
    df['Topsis Score']=score
    # Rank 1 goes to the highest score.
    df['Rank'] = (df['Topsis Score'].rank(method='max', ascending=False))
    df = df.astype({"Rank": int})
    print(df)
    df.to_csv(sys.argv[4],index=False)
# 10daysWeb
**A just-for-learning web framework that can be developed in 10 days.**
![PyPI](https://img.shields.io/pypi/pyversions/10daysweb.svg) ![PyPI](https://img.shields.io/pypi/status/10daysweb.svg) ![PyPI](https://img.shields.io/pypi/v/10daysweb.svg)
# 啰嗦
出于某些原因,我需要一个自己开发的轮子,大约只有十天时间。
于是我打算开发一个python web框架,这是我一直想做却又未完成的事。
我打算每天迭代,一遍写一遍查阅资料,记录新的想法和发现。
这样如果有谁与我处境相似,这个项目也许能够有所帮助。
最好能用成品再搭个博客什么的。
即使没有成功,也不会一无所获。
我们开始吧。
## Day 1
**万事开头难,相信我不是唯一一个在项目开始时感到无从下手的人。**
首先我下载了热门框架Flask的0.1版本的源码,三百余行的代码已经包含了一个web框架所必要的全部功能,还附带了一个使用示例。[如何下载最早的commit代码](#如何下载最早的commit代码)
对于我要实现的第一个最简单版本来说,flask仍然过于复杂了,我只提炼出`route`这个关键部件在第一版中实现。
`Route`用来管理一个web应用具体响应哪些路径和方法。通过装饰器,框架在启动时注册所有的用户函数,并在符合条件时自动调用。
@testApp.route('/', methods=['GET'])
def hello():
return 'hello world'
而`Rule`则具体表示某个需要被响应的路径,它主要由`url`, `methods`和`endpoint`组成。
`methods`包含一系列HTTP Method,表示要处理的请求类型。而`endpoint`则是实际产生返回内容的`Callable`对象,可以是函数或者类。
关于http包含哪些method,以及后续我们需要参考的报文格式和状态码,参见[RFC 2616](#https://tools.ietf.org/html/rfc2616)。
现在我们还缺少一段代码,用于监听和收发http报文,python3.4以后加入的asyncio提供了这个功能,而[官方文档](#http://asyncio.readthedocs.io)恰好给了我们一个极简的示例。
`asyncio.start_server`需要三个基本参数,收到请求时的自动调用的`client_connected_cb`,以及需要监听的地址和端口。
`client_connected_cb`则需要支持两个参数,`reader`和`writer`,份别用于读取请求报文和回写响应报文。
我在`client_connected_cb`中添加了简易的获取请求的路径的代码,用于和注册好的应用函数匹配。
同样我也已经定义了包含所有Http method的宏,不过还没有与请求进行匹配。
这样我们就得到了一个可以运行的''Web框架'',目前只能算是prototype,不过已经足够让我们印出那句世纪名言了。
Hello World!
## Day 2
**我们有了一个原型,但很多方面亟待完善**
我使用了一个开源第三方库来解析http报文,并实现了`Request`和`Response`来抽象请求。
我从rfc文档中摘取了http的状态码,和methods一起放在`utils.py`中。
尝试定义了一个异常,初步的设向是它可以让框架的使用者随时使用异常直接返回http的错误状态,`content`则是为了支持自定义的错误页面,但这部分仍不确定,也许我会使用`@error_handler`的形式来提供自定义异常时的行为。
添加了log,但在我的终端中还没有输出,待解决。
我使用了标准库`asyncio`,因为我希望这个框架是支持异步的,调整后的`handle`方法提现了处理一个请求的基本思路,但它看起来仍然很糟糕,对于异步我还未完全理清思路。
## Day 3
在代码方面,今天的改动并不大。
梳理了`handle`方法的逻辑, 我强制规定用户函数必须是协程,但日后也必须提供数据库,文件读写相关的异步封装API,否则框架仍然不是`真*异步`。
调整了流读取报文的处理策略,交由第三方解析库来判断报文是否结束。这方面并不用太过纠结,因为真正部署时讲会有nginx/apache之流替我们打理。
之后的主要工作:
- 完成`Debug模式`,实现自动重加载用户函数
- 添加静态文件路由和模式匹配路由支持
- 引入模板引擎及其异步调用封装
## Day 4
添加了动态url匹配支援,现在可以在以如下形式匹配路径:
@app.route('/<name>', methods=['GET'])
async def show_name(request, name):
return Response(content=f'hello {name}')
思考以后感觉静态文件路由完全可以由用户自行添加动态匹配来支持,即使不行还有web服务器来做,于是决定先放下这部分。
添加了`errorhandler`装饰器,现在可以通过它自定义异常时的行为和返回报文
调整了异常捕获机制,现在在找不到对应的用户方法时,能够正确的抛出404异常,而在用户方法中非预期中异常,则统一作为500状态处理
## Day 5 & 6
加入了`run_before`装饰器,用于在运行启动服务器前的初始化代码,默认传入事件循环loop参数
把这个~~丢人~~框架上传到了pip,现在可以通过`pip install 10daysweb`安装使用
尝试写一个todolist应用作为演示,看了半天前端觉得有些仓促,决定接入~~Telegram Bot~~微信小程序
加入了unitest,初步编写了一个url匹配的测试样例
## Day 7
新增信号装饰器,初步想法是用于服务器启动前和结束后初始化和关闭数据库连接池
@app.signal(type='run_before_start')
def foo(loop):
'''init database connection pool'''
增加了对应的未知信号类型异常,微信小程序api编写中。
## 如何下载最早的commit代码
作为一个知名的开源项目,Flask在github已经积累了数千此提交。
最可恨的是,github在Commit列表页面竟然没有提供一个按页跳转的功能。
下面一个不是很优雅,但确实更快的方法
首先在本地`git clone`下目标项目
使用`--reverse`参数倒置结果,拿到提交历史上最早的commit id
git log --reverse
在github上随意打开一个commit,替换掉url中的id即可。
哦,你还需要点一下`Browse files` | 10daysweb | /10daysweb-0.1.3.tar.gz/10daysweb-0.1.3/README.md | README.md |
import asyncio
import logging
import inspect
import re
from typing import Callable, List, AnyStr, Dict, Tuple, Any
import httptools
from .request import Request
from .response import Response
from .exceptions import HttpException, UnknownSignalException
from .utils import HTTP_METHODS, STATUS_CODES, DEFAULT_ERROR_PAGE_TEMPLATE
logger = logging.getLogger('tendaysweb')
logging.basicConfig(level=logging.INFO)
class TenDaysWeb():
    """A minimal asyncio-based web application: routing, error handlers
    and lifecycle signals."""

    # Lifecycle hooks a user may register via @app.signal(...).
    _signal_types = ['run_before_start', 'run_after_close']

    def __init__(self, application_name):
        """
        :param application_name: just name your TenDaysWeb Instance
        """
        self._app_name = application_name
        self._rule_list: List[Rule] = []
        self._error_handlers: Dict[int, Callable] = {}
        self._signal_func: Dict[str, List[Callable]] = {
            key: []
            for key in TenDaysWeb._signal_types
        }

    def route(self, url: str, methods: List = HTTP_METHODS, **options):
        """
        A decorator that is used to register a view function for a
        given URL rule. Example::
            @app.route('/')
            def index():
                return 'Hello World'
        """
        def decorator(func):
            self._rule_list.append(Rule(url, methods, func, **options))
            return func
        return decorator

    def signal(self, signal_type: str):
        """
        A decorator that is used to register a function supposed to be called
        before start_server
        """
        def decorator(func):
            if signal_type not in TenDaysWeb._signal_types:
                raise UnknownSignalException(signal_type, func.__name__)
            self._signal_func[signal_type].append(func)
            return func
        return decorator

    def error_handler(self, error_code):
        """
        This decorator is used to customize the behavior of an error
        :param error_code: a http status code
        """
        # Bug fix: this inner decorator was declared `async def`, so
        # applying @app.error_handler(code) produced an un-awaited
        # coroutine and the handler was never registered.
        def decorator(func):
            self._error_handlers[error_code] = func
            return func
        return decorator

    def match_request(self, request) -> Tuple[Callable, Dict[str, Any]]:
        """
        Match each request to a endpoint
        if no endpoint is eligable, return None, None
        """
        handler = kwargs = None
        for rule in self._rule_list:
            kwargs = rule.match(request.url, request.method)
            if kwargs is not None:
                handler = rule._endpoint
                break
        return handler, kwargs

    async def process_request(
            self,
            request: Request,
            handler: Callable,
            kwargs: Dict[str, Any]):
        """
        :param request: Request instance
        :param handler: A endpoint
        :param kwargs: the additional parameters for call endpoint
        """
        try:
            return await handler(request, **kwargs)
        except HttpException as e:
            # catch exception user explicit rasie in endpoint
            handler = self._error_handlers.get(e.err_code, None)
            if handler is None:
                return Response(
                    status_code=e.err_code,
                    content=TenDaysWeb.generate_default_error_page(
                        e.err_code))
            return await handler()

    async def handler(self, reader, writer):
        """
        The per-connection handler: keeps reading requests from *reader*
        and writing responses to *writer* until the peer closes.
        :param reader: asyncio StreamReader for the connection
        :param writer: asyncio StreamWriter for the connection
        """
        while True:
            request: Request = await self.read_http_message(reader)
            response: Response = Response()

            # A None request means the client closed the connection.
            if request is None:
                writer.close()
                break

            handler, kwargs = self.match_request(request)
            if handler is None:
                response.status_code = 404
                response.content = TenDaysWeb.generate_default_error_page(
                    response.status_code)
            else:
                try:
                    response = await self.process_request(
                        request, handler, kwargs)
                except Exception as e:
                    # Any unexpected endpoint failure becomes a 500.
                    logger.error(str(e))
                    response = Response(
                        status_code=500,
                        content=TenDaysWeb.generate_default_error_page(500))

            # send payload
            writer.write(response.to_payload())

            try:
                await writer.drain()
                writer.write_eof()
            except ConnectionResetError:
                writer.close()
                break

    async def start_server(self,
                           loop,
                           http_handler: Callable,
                           websocket_handler=None,
                           address: str = 'localhost',
                           port: int=8000,):
        """
        start server
        """
        # NOTE(review): synchronous signal callbacks are invoked without
        # the loop argument, while the README example takes one — confirm
        # the intended signature for sync callbacks.
        for func in self._signal_func['run_before_start']:
            if inspect.iscoroutinefunction(func):
                await func(loop)
            else:
                func()

        await asyncio.start_server(http_handler, address, port)

        for func in self._signal_func['run_after_close']:
            if inspect.iscoroutinefunction(func):
                await func(loop)
            else:
                func()

    def run(self,
            host: str = "localhost",
            port: int = 8000,
            debug: bool = False):
        """
        start the http server
        :param host: The listening host
        :param port: The listening port
        :param debug: whether it is in debug mod or not (accepted but not
            yet implemented)
        """
        loop = asyncio.get_event_loop()
        try:
            loop.run_until_complete(
                self.start_server(loop, self.handler, None, host, port))
            logger.info(f'Start listening {host}:{port}')
            loop.run_forever()
        except KeyboardInterrupt:
            loop.close()

    async def read_http_message(
            self, reader: asyncio.streams.StreamReader) -> Request:
        """
        this funciton will reading data cyclically
        until recivied a complete http message
        :param reqreaderuest: the asyncio.streams.StreamReader instance
        :return The Request instance
        """
        protocol = ParseProtocol()
        parser = httptools.HttpRequestParser(protocol)
        while True:
            data = await reader.read(2 ** 16)

            try:
                parser.feed_data(data)
            except httptools.HttpParserUpgrade:
                raise HttpException(400)

            if protocol.completed:
                return Request.load_from_parser(parser, protocol)
            if data == b'':
                return None

    @staticmethod
    def generate_default_error_page(status, reason='', content=''):
        # Fill the built-in HTML error template with the status code and
        # its standard reason phrase.
        return DEFAULT_ERROR_PAGE_TEMPLATE.format(
            **{'status': status,
               'reason': STATUS_CODES.get(status, 'Unknow'),
               'content': content})
class Rule():
    # Matches one dynamic URL segment such as '<name>'.
    parttern = re.compile(r'\<([^/]+)\>')

    def __init__(self, url: AnyStr, methods: List, endpoint: Callable,
                 **options):
        """
        A rule describes a url that is expected to be handled and how to
        handle it.
        :param url: url to be handled
        :param methods: list of HTTP method names
        :param endpoint: the actual function/class that processes the request
        """
        self._url = url
        self._methods = methods
        self._options = options
        self._endpoint = endpoint
        # Names of the dynamic segments, in order of appearance.
        self._param_name_list = Rule.parttern.findall(url)
        # Turn e.g. '/user/<name>' into the anchored regex '^/user/([^/]+)$'.
        dynamic = Rule.parttern.sub('([^/]+)', url)
        self._url_pattern = re.compile('^' + dynamic + '$')

    def match(self, url: str, method: str):
        """
        Return a dict mapping the rule's dynamic segment names to the
        values captured from *url* when (url, method) matches this rule,
        otherwise None.
        """
        if method not in self._methods:
            return None
        found = self._url_pattern.search(url)
        if found is None:
            return None
        return dict(zip(self._param_name_list, found.groups()))
class ParseProtocol:
    """
    The protocol for HttpRequestParser
    """

    def __init__(self) -> None:
        self.url: str = ''                  # request target, set by on_url
        self.headers: Dict[str, str] = {}   # header name -> value
        self.body: bytes = b''              # accumulated request body
        self.completed: bool = False        # True once the full message parsed

    def on_url(self, url: bytes) -> None:
        # httptools callback: the request line's target has been parsed.
        self.url = url.decode()

    def on_header(self, name: bytes, value: bytes) -> None:
        # httptools callback: one header line has been parsed.
        self.headers[name.decode()] = value.decode()

    def on_body(self, body: bytes) -> None:
        # httptools callback: may fire multiple times; append each chunk.
        self.body += body

    def on_message_complete(self) -> None:
        # httptools callback: the whole HTTP message has been received.
        self.completed = True