| code (string, lengths 22–1.05M) | apis (list, lengths 1–3.31k) | extract_api (string, lengths 75–3.25M) |
|---|---|---|
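Each row below pairs a raw Python source snippet (`code`, truncated in this preview) with the fully qualified names of the calls it makes (`apis`) and per-call span records (`extract_api`). As a rough illustration of how columns like these can be derived, here is a minimal `ast`-based sketch; it is an assumption about the approach, not the pipeline that actually built this dataset, and the helper name `extract_api_calls` is mine:

```python
# Minimal sketch (an assumption, not the dataset's real extraction code): walk a
# module's AST, resolve dotted call names through its imports, and record the
# absolute character span of every resolved call, much like the apis/extract_api columns.
import ast


def extract_api_calls(source: str):
    """Return (dotted_name, span, source_text) for each resolvable call."""
    tree = ast.parse(source)

    # Map local names to the dotted paths they were imported as.
    aliases = {}
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                aliases[alias.asname or alias.name] = alias.name
        elif isinstance(node, ast.ImportFrom) and node.module:
            for alias in node.names:
                aliases[alias.asname or alias.name] = f"{node.module}.{alias.name}"

    def dotted_name(func):
        # Rebuild e.g. "os.path.join" from an Attribute/Name chain.
        parts = []
        while isinstance(func, ast.Attribute):
            parts.append(func.attr)
            func = func.value
        if isinstance(func, ast.Name):
            parts.append(aliases.get(func.id, func.id))
            return ".".join(reversed(parts))
        return None

    # Precompute line start offsets so spans are absolute file offsets.
    line_starts = [0]
    for line in source.splitlines(keepends=True):
        line_starts.append(line_starts[-1] + len(line))

    def abs_offset(lineno, col):
        return line_starts[lineno - 1] + col

    calls = []
    for node in ast.walk(tree):
        if isinstance(node, ast.Call):
            name = dotted_name(node.func)
            if name and "." in name:
                span = (abs_offset(node.lineno, node.col_offset),
                        abs_offset(node.end_lineno, node.end_col_offset))
                calls.append((name, span, ast.get_source_segment(source, node)))
    return calls


if __name__ == "__main__":
    sample = "import os\nPATH = os.path.join('a', 'b')\n"
    print(extract_api_calls(sample))
    # -> [('os.path.join', (17, 39), "os.path.join('a', 'b')")]
```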
import os
from setuptools import setup
VERSION = "0.2"
def get_long_description():
with open(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md"),
encoding="utf8",
) as fp:
return fp.read()
setup(
name="instapaper-to-sqlite",
description="Save data from In... | [
"os.path.abspath"
] | [((139, 164), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (154, 164), False, 'import os\n')] |
from typing import List
from pybm import PybmConfig
from pybm.command import CLICommand
from pybm.config import get_reporter_class
from pybm.exceptions import PybmError
from pybm.reporters import BaseReporter
from pybm.status_codes import ERROR, SUCCESS
from pybm.util.path import get_subdirs
class CompareCommand(CLI... | [
"pybm.PybmConfig.load",
"pybm.util.path.get_subdirs",
"pybm.config.get_reporter_class",
"pybm.exceptions.PybmError"
] | [((583, 600), 'pybm.PybmConfig.load', 'PybmConfig.load', ([], {}), '()\n', (598, 600), False, 'from pybm import PybmConfig\n'), ((1475, 1513), 'pybm.config.get_reporter_class', 'get_reporter_class', ([], {'config': 'self.config'}), '(config=self.config)\n', (1493, 1513), False, 'from pybm.config import get_reporter_cla... |
"""
For a given detector get a WIMPrate for a given detector (not taking into
account any detector effects
"""
import numericalunits as nu
import wimprates as wr
import dddm
export, __all__ = dddm.exporter()
@export
class SHM:
"""
class used to pass a halo model to the rate computation
must cont... | [
"dddm.exporter",
"wimprates.observed_speed_dist"
] | [((194, 209), 'dddm.exporter', 'dddm.exporter', ([], {}), '()\n', (207, 209), False, 'import dddm\n'), ((1381, 1431), 'wimprates.observed_speed_dist', 'wr.observed_speed_dist', (['v', 't', 'self.v_0', 'self.v_esc'], {}), '(v, t, self.v_0, self.v_esc)\n', (1403, 1431), True, 'import wimprates as wr\n')] |
# imports
import os
import json
import subprocess
abs_join = lambda p1, p2 : os.path.abspath(os.path.join(p1, p2))
# constants
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SEED_RELPATH = "./strprose/example_files/_seeds.json"
SEED_FULLPATH = abs_join(SCRIPT_DIR, SEED_RELPATH)
SEED_INFO = None
with open(SEE... | [
"json.load",
"os.path.dirname",
"subprocess.run",
"os.path.join"
] | [((158, 183), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (173, 183), False, 'import os\n'), ((359, 371), 'json.load', 'json.load', (['f'], {}), '(f)\n', (368, 371), False, 'import json\n'), ((93, 113), 'os.path.join', 'os.path.join', (['p1', 'p2'], {}), '(p1, p2)\n', (105, 113), False, 'i... |
import sys
from django.urls import resolve
def global_vars(request):
return {
'GLOBAL_TWITTER_ACCOUNT': '@open_apprentice',
'ORGANIZATION_NAME': 'Open Apprentice Foundation',
'ORGANIZATION_WEBSITE': 'https://openapprentice.org',
'ORGANIZATION_LOGO': '/static/img/ellie/open-apprenti... | [
"django.urls.resolve"
] | [((495, 521), 'django.urls.resolve', 'resolve', (['request.path_info'], {}), '(request.path_info)\n', (502, 521), False, 'from django.urls import resolve\n')] |
""" Training script for steps_with_decay policy"""
import argparse
import os
import sys
import pickle
import resource
import traceback
import logging
from collections import defaultdict
import numpy as np
import yaml
import torch
from torch.autograd import Variable
import torch.nn as nn
import cv2
cv2.setNumThreads(0... | [
"core.config.cfg_from_file",
"logging.getLogger",
"core.config.assert_and_infer_cfg",
"utils.training_stats.TrainingStats",
"torch.cuda.device_count",
"core.config.cfg_from_list",
"torch.cuda.is_available",
"sys.exit",
"logging.info",
"utils.logging.setup_logging",
"os.path.exists",
"utils.net... | [((301, 321), 'cv2.setNumThreads', 'cv2.setNumThreads', (['(0)'], {}), '(0)\n', (318, 321), False, 'import cv2\n'), ((1022, 1045), 'utils.logging.setup_logging', 'setup_logging', (['__name__'], {}), '(__name__)\n', (1035, 1045), False, 'from utils.logging import setup_logging\n'), ((1188, 1230), 'resource.getrlimit', '... |
# Copyright (c) 2019-2020 hippo91 <<EMAIL>>
# Copyright (c) 2020 <NAME> <<EMAIL>>
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
"""Astr... | [
"astroid.builder.parse",
"functools.partial",
"astroid.manager.AstroidManager",
"astroid.inference_tip.inference_tip"
] | [((765, 964), 'astroid.builder.parse', 'parse', (['"""\n # different functions defined in multiarray.py\n def inner(a, b):\n return numpy.ndarray([0, 0])\n\n def vdot(a, b):\n return numpy.ndarray([0, 0])\n """'], {}), '(\n """\n # different functions defined in multiarray.py\n de... |
import sys
import typing
import numpy as np
def solve(a: np.ndarray, k: int) -> typing.NoReturn:
n = len(a)
def compute_dp(a: np.ndarray) -> np.ndarray:
dp = np.zeros((n + 1, k), np.bool8)
dp[0, 0] = True
for i in range(n):
dp[i + 1] = dp[i].copy()
... | [
"sys.stdin.readline",
"numpy.zeros",
"numpy.flatnonzero"
] | [((188, 218), 'numpy.zeros', 'np.zeros', (['(n + 1, k)', 'np.bool8'], {}), '((n + 1, k), np.bool8)\n', (196, 218), True, 'import numpy as np\n'), ((605, 622), 'numpy.flatnonzero', 'np.flatnonzero', (['l'], {}), '(l)\n', (619, 622), True, 'import numpy as np\n'), ((968, 988), 'sys.stdin.readline', 'sys.stdin.readline', ... |
import network
def conncb(task):
print("[{}] Connected".format(task))
def disconncb(task):
print("[{}] Disconnected".format(task))
def subscb(task):
print("[{}] Subscribed".format(task))
def pubcb(pub):
print("[{}] Published: {}".format(pub[0], pub[1]))
def datacb(msg):
print("[{}] Data arrived... | [
"network.mqtt"
] | [((390, 608), 'network.mqtt', 'network.mqtt', (['"""loboris"""', '"""mqtt://loboris.eu"""'], {'user': '"""wifimcu"""', 'password': '"""<PASSWORD>"""', 'cleansession': '(True)', 'connected_cb': 'conncb', 'disconnected_cb': 'disconncb', 'subscribed_cb': 'subscb', 'published_cb': 'pubcb', 'data_cb': 'datacb'}), "('loboris... |
# flake8: NOQA E501
import ast
import random
from textwrap import dedent
from typing import List
from main.exercises import generate_list, generate_string
from main.text import ExerciseStep, MessageStep, Page, Step, VerbatimStep, search_ast
from main.utils import returns_stdout
class IntroducingLists(Page):
clas... | [
"ast.Attribute",
"textwrap.dedent",
"ast.Constant",
"random.choice",
"random.shuffle",
"main.exercises.generate_list",
"random.randrange",
"main.exercises.generate_string",
"ast.Name",
"random.randint"
] | [((7157, 7185), 'random.choice', 'random.choice', (['[True, False]'], {}), '([True, False])\n', (7170, 7185), False, 'import random\n'), ((7207, 7225), 'main.exercises.generate_list', 'generate_list', (['int'], {}), '(int)\n', (7220, 7225), False, 'from main.exercises import generate_list, generate_string\n'), ((12998,... |
##########################################################################
#
# Copyright (c) 2013, <NAME>. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the f... | [
"GafferUI.PlugValueWidget.__init__",
"GafferUI.PlugValueWidget.setHighlighted",
"imath.Color4f",
"GafferUI.ScriptWindow.acquire",
"GafferUI.ColorChooser.changesShouldBeMerged",
"Gaffer.WeakMethod",
"GafferUI.WidgetAlgo.keepUntilIdle",
"GafferUI.ColorSwatch",
"GafferUI.Pointer.setCurrent",
"Gaffer.... | [((2030, 2052), 'GafferUI.ColorSwatch', 'GafferUI.ColorSwatch', ([], {}), '()\n', (2050, 2052), False, 'import GafferUI\n'), ((2056, 2123), 'GafferUI.PlugValueWidget.__init__', 'GafferUI.PlugValueWidget.__init__', (['self', 'self.__swatch', 'plugs'], {}), '(self, self.__swatch, plugs, **kw)\n', (2089, 2123), False, 'im... |
#!/usr/bin/env python
# Copyright 2016 <NAME>. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
"""
cmd_exec_test.py: Tests ... | [
"osh.state.SetLocalString",
"core.test_lib.InitExecutor",
"core.test_lib.InitCommandParser",
"core.meta.syntax_asdl.token",
"unittest.main",
"core.test_lib.MakeTestEvaluator",
"core.test_lib.MakeArena"
] | [((576, 604), 'core.test_lib.MakeTestEvaluator', 'test_lib.MakeTestEvaluator', ([], {}), '()\n', (602, 604), False, 'from core import test_lib\n'), ((607, 652), 'osh.state.SetLocalString', 'state.SetLocalString', (['word_ev.mem', '"""x"""', '"""xxx"""'], {}), "(word_ev.mem, 'x', 'xxx')\n", (627, 652), False, 'from osh ... |
# Generated by Django 2.0.8 on 2019-05-29 16:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blitz_api', '0019_merge_20190524_1719'),
]
operations = [
migrations.AlterField(
model_name='exportmedia',
name='fil... | [
"django.db.models.FileField"
] | [((342, 406), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""export/%Y/%m/"""', 'verbose_name': '"""file"""'}), "(upload_to='export/%Y/%m/', verbose_name='file')\n", (358, 406), False, 'from django.db import migrations, models\n')] |
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gio, Gtk, Gdk
class AddFriendWidget(Gtk.Box):
def __init__(self, main_window, fchat_prv, friend_list):
Gtk.Box.__init__(self, spacing=7, orientation = Gtk.Orientation.VERTICAL)
self.fchat_prv = fchat_prv
self.main_window... | [
"gi.repository.Gtk.Box",
"gi.repository.Gtk.Box.__init__",
"gi.repository.Gtk.Spinner",
"gi.require_version",
"gi.repository.Gtk.Entry",
"gi.repository.Gtk.MessageDialog",
"gi.repository.Gtk.Clipboard.get",
"gi.repository.Gtk.Button"
] | [((10, 42), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (28, 42), False, 'import gi\n'), ((186, 257), 'gi.repository.Gtk.Box.__init__', 'Gtk.Box.__init__', (['self'], {'spacing': '(7)', 'orientation': 'Gtk.Orientation.VERTICAL'}), '(self, spacing=7, orientation=Gtk.... |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not u... | [
"random.choice",
"requests.packages.urllib3.disable_warnings",
"time.sleep",
"nose.plugins.attrib.attr"
] | [((1176, 1220), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (1218, 1220), False, 'import requests\n'), ((5319, 5406), 'nose.plugins.attrib.attr', 'attr', ([], {'tags': "['advanced', 'advancedns', 'smoke', 'basic']", 'required_hardware': '"""false"""'}), ... |
"""Test OpenZWave Websocket API."""
from unittest.mock import patch
from openzwavemqtt.const import (
ATTR_CODE_SLOT,
ATTR_LABEL,
ATTR_OPTIONS,
ATTR_POSITION,
ATTR_VALUE,
ValueType,
)
from openpeerpower.components.ozw.const import ATTR_CONFIG_PARAMETER
from openpeerpower.components.ozw.lock im... | [
"unittest.mock.patch"
] | [((11067, 11107), 'unittest.mock.patch', 'patch', (['"""openzwavemqtt.OZWOptions.listen"""'], {}), "('openzwavemqtt.OZWOptions.listen')\n", (11072, 11107), False, 'from unittest.mock import patch\n')] |
import re
import datetime
from javaccflab.lexer import parse
from javaccflab.java_token import TokenType, Token, update_token_value
class Formatter:
def __init__(self, files):
self.__files = files
self.__file = None
self.__tokens = []
self.__to_fix = dict()
def process(self):
... | [
"javaccflab.java_token.update_token_value",
"re.sub",
"datetime.date.today",
"javaccflab.java_token.Token"
] | [((26009, 26054), 're.sub', 're.sub', (['"""(.)([A-Z][a-z]+)"""', '"""\\\\1_\\\\2"""', 'naming'], {}), "('(.)([A-Z][a-z]+)', '\\\\1_\\\\2', naming)\n", (26015, 26054), False, 'import re\n'), ((9361, 9391), 'javaccflab.java_token.Token', 'Token', (['None', 'TokenType.COMMENT'], {}), '(None, TokenType.COMMENT)\n', (9366,... |
import sqlite3
class ManageData:
def __init__(self, queue_tracker_db, email_tracker_db, delivery_tracker_db):
self.queue_tracker_db = queue_tracker_db
self.email_tracker_db = email_tracker_db
self.delivery_tracker_db = delivery_tracker_db
def manage_queue_tracker(self, fields):
... | [
"sqlite3.connect"
] | [((2551, 2577), 'sqlite3.connect', 'sqlite3.connect', (['self.path'], {}), '(self.path)\n', (2566, 2577), False, 'import sqlite3\n')] |
import math
def g_path_regularize(fake_img, latents, mean_path_length, decay=0.01):
noise = torch.randn_like(fake_img) / math.sqrt(
fake_img.shape[2] * fake_img.shape[3]
)
grad, = autograd.grad(
outputs=(fake_img * noise).sum(), inputs=latents, create_graph=True
)
path_lengths = tor... | [
"math.sqrt"
] | [((126, 174), 'math.sqrt', 'math.sqrt', (['(fake_img.shape[2] * fake_img.shape[3])'], {}), '(fake_img.shape[2] * fake_img.shape[3])\n', (135, 174), False, 'import math\n')] |
import numpy as np
from typing import Tuple, Union, Optional
from autoarray.structures.arrays.two_d import array_2d_util
from autoarray.geometry import geometry_util
from autoarray import numba_util
from autoarray.mask import mask_2d_util
@numba_util.jit()
def grid_2d_centre_from(grid_2d_slim: np.ndarray) ... | [
"autoarray.geometry.geometry_util.central_scaled_coordinate_2d_from",
"numpy.mean",
"autoarray.structures.arrays.two_d.array_2d_util.array_2d_native_from",
"numpy.roll",
"numpy.asarray",
"autoarray.numba_util.jit",
"numpy.max",
"numpy.subtract",
"numpy.stack",
"numpy.zeros",
"numpy.square",
"a... | [((252, 268), 'autoarray.numba_util.jit', 'numba_util.jit', ([], {}), '()\n', (266, 268), False, 'from autoarray import numba_util\n'), ((824, 840), 'autoarray.numba_util.jit', 'numba_util.jit', ([], {}), '()\n', (838, 840), False, 'from autoarray import numba_util\n'), ((11036, 11052), 'autoarray.numba_util.jit', 'num... |
# coding: utf-8
import requests, math
import gevent
from gevent.queue import Queue
from gevent import monkey; monkey.patch_all()
from pyquery import PyQuery
class Proxies():
def __init__(self):
self.domestic_gn_url = 'http://www.kuaidaili.com/free/inha/{0}/'
self.domestic_pt_url = 'http://www.kuai... | [
"math.ceil",
"requests.Session",
"gevent.monkey.patch_all",
"gevent.queue.Queue",
"gevent.joinall",
"pyquery.PyQuery",
"gevent.spawn"
] | [((111, 129), 'gevent.monkey.patch_all', 'monkey.patch_all', ([], {}), '()\n', (127, 129), False, 'from gevent import monkey\n'), ((536, 554), 'requests.Session', 'requests.Session', ([], {}), '()\n', (552, 554), False, 'import requests, math\n'), ((1726, 1733), 'gevent.queue.Queue', 'Queue', ([], {}), '()\n', (1731, 1... |
# coding: utf-8
"""
Name: upper_air_humidity.py
Make upper level weather chart.
Usage: python3 upper_air_humidity.py --file <ncfile>
Author: <NAME>
Date: 2022/01/07
"""
import argparse
from ncmagics import fetchtime, japanmap, meteotool
def parse_args() -> dict:
"""parse_args.
set file path.
Args:
... | [
"ncmagics.japanmap.JpMap",
"ncmagics.fetchtime.fetch_time",
"argparse.ArgumentParser",
"ncmagics.meteotool.MeteoTools"
] | [((366, 391), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (389, 391), False, 'import argparse\n'), ((756, 784), 'ncmagics.fetchtime.fetch_time', 'fetchtime.fetch_time', (['ncfile'], {}), '(ncfile)\n', (776, 784), False, 'from ncmagics import fetchtime, japanmap, meteotool\n'), ((937, 971), '... |
from datetime import date
from django.core.cache import cache
from django.db.models import Q, F
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from django.views.generic import ListView, DetailView
#from silk.profiling.profiler import silk_profile
from config.models import SideBar
f... | [
"django.shortcuts.get_object_or_404",
"django.db.models.F",
"datetime.date.today",
"django.core.cache.cache.set",
"django.db.models.Q",
"config.models.SideBar.objects.filter",
"django.core.cache.cache.get"
] | [((653, 703), 'config.models.SideBar.objects.filter', 'SideBar.objects.filter', ([], {'status': 'SideBar.STATUS_SHOW'}), '(status=SideBar.STATUS_SHOW)\n', (675, 703), False, 'from config.models import SideBar\n'), ((1387, 1430), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Category'], {'pk': 'category_... |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# Adapted from PyQtGraph
import sys
from . import ptime
from .. import config
class Profiler(object):
"""Simple profiler allowing directed, hierarchical measurement of ti... | [
"sys._getframe"
] | [((2435, 2451), 'sys._getframe', 'sys._getframe', (['(1)'], {}), '(1)\n', (2448, 2451), False, 'import sys\n')] |
from django.contrib.auth import get_user_model
from djangosaml2idp.processors import BaseProcessor
User = get_user_model()
class TestBaseProcessor:
def test_extract_user_id_configure_by_user_class(self):
user = User()
user.USERNAME_FIELD = 'email'
user.email = 'test_email'
asse... | [
"django.contrib.auth.get_user_model",
"djangosaml2idp.processors.BaseProcessor"
] | [((107, 123), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (121, 123), False, 'from django.contrib.auth import get_user_model\n'), ((323, 349), 'djangosaml2idp.processors.BaseProcessor', 'BaseProcessor', (['"""entity-id"""'], {}), "('entity-id')\n", (336, 349), False, 'from djangosaml2idp.p... |
from datetime import datetime
from typing import Any, List, Optional, Union
from pydantic import BaseModel, Field, HttpUrl, validator
from pydantic.dataclasses import dataclass
class Index(BaseModel):
id: str
name: str
time_gate: HttpUrl = Field(alias="timegate")
cdx_api: HttpUrl = Field(alias="cdx-a... | [
"datetime.datetime.strptime",
"pydantic.Field",
"pydantic.validator",
"pydantic.dataclasses.dataclass"
] | [((328, 350), 'pydantic.dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (337, 350), False, 'from pydantic.dataclasses import dataclass\n'), ((459, 481), 'pydantic.dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (468, 481), False, 'from pydantic.dat... |
import re
import os
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from pages.models import Page, Image
PEP_TEMPLATE = 'pages/pep-page.html'
pep_url = lambda num: 'dev/peps/pep-{}/'.format(num)
def check_pat... | [
"os.path.exists",
"django.core.files.File",
"re.compile",
"os.path.join",
"pages.models.Image.objects.filter",
"pages.models.Image",
"bs4.BeautifulSoup",
"re.sub",
"pages.models.Page.objects.get_or_create",
"django.core.exceptions.ImproperlyConfigured",
"re.search"
] | [((854, 907), 'os.path.join', 'os.path.join', (['settings.PEP_REPO_PATH', '"""pep-0000.html"""'], {}), "(settings.PEP_REPO_PATH, 'pep-0000.html')\n", (866, 907), False, 'import os\n'), ((962, 989), 'bs4.BeautifulSoup', 'BeautifulSoup', (['pep0_content'], {}), '(pep0_content)\n', (975, 989), False, 'from bs4 import Beau... |
from flask import Flask
app = Flask(__name__, static_folder='static')
from app import routes
| [
"flask.Flask"
] | [((31, 70), 'flask.Flask', 'Flask', (['__name__'], {'static_folder': '"""static"""'}), "(__name__, static_folder='static')\n", (36, 70), False, 'from flask import Flask\n')] |
import logging
import george
import numpy as np
from robo.priors.default_priors import DefaultPrior
from robo.models.gaussian_process import GaussianProcess
from robo.models.gaussian_process_mcmc import GaussianProcessMCMC
from robo.maximizers.random_sampling import RandomSampling
from robo.maximizers.scipy_optimizer ... | [
"logging.getLogger",
"george.kernels.Matern52Kernel",
"numpy.ones",
"robo.maximizers.random_sampling.RandomSampling",
"robo.models.gaussian_process_mcmc.GaussianProcessMCMC",
"robo.maximizers.differential_evolution.DifferentialEvolution",
"robo.acquisition_functions.information_gain.InformationGain",
... | [((748, 775), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (765, 775), False, 'import logging\n'), ((2351, 2372), 'numpy.all', 'np.all', (['(lower < upper)'], {}), '(lower < upper)\n', (2357, 2372), True, 'import numpy as np\n'), ((2665, 2682), 'numpy.ones', 'np.ones', (['[n_dims]'], {}... |
# Generated by Django 3.1.13 on 2021-10-29 11:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0095_bisericapage_utitle'),
]
operations = [
migrations.AddField(
model_name='bisericapage',
name='datare_an... | [
"django.db.models.IntegerField"
] | [((341, 383), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (360, 383), False, 'from django.db import migrations, models\n')] |
"""
Functions for loading input data.
Author: <NAME> <<EMAIL>>
"""
import os
import numpy as np
def load_img(path: str, img_nums: list, shape: tuple) -> np.array:
"""
Loads a image in the human-readable format.
Args:
path:
The path to the to the folder with mnist images.
i... | [
"numpy.array",
"numpy.prod",
"numpy.zeros"
] | [((2132, 2163), 'numpy.zeros', 'np.zeros', (['num_images'], {'dtype': 'int'}), '(num_images, dtype=int)\n', (2140, 2163), True, 'import numpy as np\n'), ((2071, 2091), 'numpy.prod', 'np.prod', (['image_shape'], {}), '(image_shape)\n', (2078, 2091), True, 'import numpy as np\n'), ((821, 835), 'numpy.array', 'np.array', ... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Apache 2.0 License.
import infra.e2e_args
import infra.ccf
import infra.jsonrpc
import logging
from time import gmtime, strftime
import csv
import random
from loguru import logger as LOG
class AppUser:
def __init__(self, network, n... | [
"loguru.logger.success",
"csv.DictReader",
"loguru.logger.debug",
"time.gmtime"
] | [((8939, 9005), 'loguru.logger.success', 'LOG.success', (['f"""{tx_id} transactions have been successfully issued"""'], {}), "(f'{tx_id} transactions have been successfully issued')\n", (8950, 9005), True, 'from loguru import logger as LOG\n'), ((1723, 1740), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (1... |
# Generated by Django 3.0.2 on 2020-03-17 08:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myApp', '0016_usergroup_buyer'),
]
operations = [
migrations.CreateModel(
name='Chat',
fields=[
('id... | [
"django.db.models.CharField"
] | [((323, 389), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(31)', 'primary_key': '(True)', 'serialize': '(False)'}), '(max_length=31, primary_key=True, serialize=False)\n', (339, 389), False, 'from django.db import migrations, models\n'), ((421, 455), 'django.db.models.CharField', 'models.Char... |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ... | [
"base64.b64encode",
"json.dumps"
] | [((950, 984), 'base64.b64encode', 'base64.b64encode', (['raw_file_content'], {}), '(raw_file_content)\n', (966, 984), False, 'import base64\n'), ((1168, 1188), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (1178, 1188), False, 'import json\n')] |
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import os
import tensorflow as tf
'''
gluoncv backbone + multi_gpu
'''
# ------------------------------------------------
VERSION = 'Cascade_FPN_Res50_COCO_1x_20190421_v3'
NET_NAME = 'resnet50_v1d'
ADD_BOX_IN_TENSORBOARD = True
... | [
"os.path.abspath",
"os.path.join",
"tensorflow.random_normal_initializer"
] | [((389, 411), 'os.path.abspath', 'os.path.abspath', (['"""../"""'], {}), "('../')\n", (404, 411), False, 'import os\n'), ((1096, 1145), 'os.path.join', 'os.path.join', (['ROOT_PATH', '"""output/trained_weights"""'], {}), "(ROOT_PATH, 'output/trained_weights')\n", (1108, 1145), False, 'import os\n'), ((2554, 2605), 'ten... |
import numpy as np
from stumpff import C, S
from CelestialBody import BODIES
from numerical import newton, laguerre
from lagrange import calc_f, calc_fd, calc_g, calc_gd
def kepler_chi(chi, alpha, r0, vr0, mu, dt):
''' Kepler's Equation of the universal anomaly, modified
for use in numerical solvers. '''
... | [
"numpy.abs",
"numpy.allclose",
"numpy.sqrt",
"lagrange.calc_f",
"numerical.laguerre",
"lagrange.calc_g",
"numpy.array",
"numpy.dot",
"numerical.newton",
"lagrange.calc_fd",
"lagrange.calc_gd",
"numpy.cos",
"numpy.linalg.norm",
"numpy.sin",
"stumpff.S",
"stumpff.C"
] | [((953, 957), 'stumpff.S', 'S', (['z'], {}), '(z)\n', (954, 957), False, 'from stumpff import C, S\n'), ((2171, 2190), 'numpy.linalg.norm', 'np.linalg.norm', (['r_0'], {}), '(r_0)\n', (2185, 2190), True, 'import numpy as np\n'), ((2235, 2254), 'numpy.linalg.norm', 'np.linalg.norm', (['v_0'], {}), '(v_0)\n', (2249, 2254... |
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by... | [
"official.nlp.transformer.utils.tokenizer._escape_token",
"official.nlp.transformer.utils.tokenizer._unescape_token",
"tensorflow.io.gfile.GFile",
"official.nlp.transformer.utils.tokenizer._join_tokens_to_string",
"official.nlp.transformer.utils.tokenizer._list_to_index_dict",
"official.nlp.transformer.ut... | [((6843, 6857), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (6855, 6857), True, 'import tensorflow as tf\n'), ((907, 948), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (934, 948), False, 'import tempfile\n'), ((1110, 1168), 'official.nlp.tr... |
# -*- coding: utf-8 -*-
# Thanks to @skelsec for his awesome tool Pypykatz
# Checks his project here: https://github.com/skelsec/pypykatz
import codecs
import traceback
from lazagne.config.module_info import ModuleInfo
from lazagne.config.constant import constant
from pypykatz.pypykatz import pypykatz
... | [
"traceback.format_exc",
"codecs.encode",
"lazagne.config.module_info.ModuleInfo.__init__",
"pypykatz.pypykatz.pypykatz.go_live"
] | [((579, 647), 'lazagne.config.module_info.ModuleInfo.__init__', 'ModuleInfo.__init__', (['self', '"""pypykatz"""', '"""windows"""'], {'system_module': '(True)'}), "(self, 'pypykatz', 'windows', system_module=True)\n", (598, 647), False, 'from lazagne.config.module_info import ModuleInfo\n'), ((725, 743), 'pypykatz.pypy... |
from django.contrib.auth.models import Permission, User
from django.db import models
class Album(models.Model):
user = models.ForeignKey(User, default=1,on_delete=models.CASCADE)
artist = models.CharField(max_length=250)
album_title = models.CharField(max_length=500)
genre = models.CharField(max_lengt... | [
"django.db.models.FileField",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.ForeignKey"
] | [((125, 185), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'default': '(1)', 'on_delete': 'models.CASCADE'}), '(User, default=1, on_delete=models.CASCADE)\n', (142, 185), False, 'from django.db import models\n'), ((198, 230), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250... |
"""Configures a Kafka Connector for Postgres Station data"""
import json
import logging
import requests
from settings import Settings
logger = logging.getLogger(__name__)
KAFKA_CONNECT_URL = f"{Settings.URLs.KAFKA_CONNECT_URL}/connectors"
CONNECTOR_NAME = "stations"
def configure_connector():
"""Starts and co... | [
"logging.getLogger",
"requests.post",
"logging.debug",
"json.dumps",
"requests.get",
"logging.info"
] | [((145, 172), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (162, 172), False, 'import logging\n'), ((364, 428), 'logging.debug', 'logging.debug', (['"""Creating or updating kafka connect connector..."""'], {}), "('Creating or updating kafka connect connector...')\n", (377, 428), False, ... |
import io
import logging
import json
import numpy
import torch
import numpy as np
from tqdm import tqdm
from clie.inputters import constant
from clie.objects import Sentence
from torch.utils.data import Dataset
from torch.utils.data.sampler import Sampler
logger = logging.getLogger(__name__)
def load_word_embeddings... | [
"logging.getLogger",
"numpy.random.random",
"torch.LongTensor",
"clie.objects.Sentence",
"io.open",
"numpy.argsort",
"numpy.array",
"json.load",
"numpy.random.shuffle"
] | [((266, 293), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (283, 293), False, 'import logging\n'), ((364, 431), 'io.open', 'io.open', (['file', '"""r"""'], {'encoding': '"""utf-8"""', 'newline': '"""\n"""', 'errors': '"""ignore"""'}), "(file, 'r', encoding='utf-8', newline='\\n', errors... |
from distutils.extension import Extension
cmdclass = {}
try:
# with Cython
from Cython.Build import build_ext
cmdclass["build_ext"] = build_ext
module_src = "cgranges/python/cgranges.pyx"
except ImportError: # without Cython
module_src = "cgranges/python/cgranges.c"
def build(setup_kwargs):
... | [
"distutils.extension.Extension"
] | [((479, 666), 'distutils.extension.Extension', 'Extension', (['"""cgranges"""'], {'sources': "[module_src, 'cgranges/cgranges.c']", 'depends': "['cgranges/cgranges.h', 'cgranges/khash.h', 'cgranges/python/cgranges.pyx']", 'include_dirs': "['cgranges']"}), "('cgranges', sources=[module_src, 'cgranges/cgranges.c'], depen... |
# coding: UTF-8
import time
import torch
import numpy as np
from train_eval import train, init_network
from importlib import import_module
import argparse
parser = argparse.ArgumentParser(description='Chinese Text Classification')
parser.add_argument('--model', type=str, required=True, help='choose a model: TextCNN')
... | [
"torch.cuda.manual_seed_all",
"torch.manual_seed",
"utils.get_time_dif",
"importlib.import_module",
"argparse.ArgumentParser",
"config.Config",
"train_eval.init_network",
"train_eval.train",
"utils.build_iterator",
"utils.build_dataset",
"numpy.random.seed",
"time.time"
] | [((165, 231), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Chinese Text Classification"""'}), "(description='Chinese Text Classification')\n", (188, 231), False, 'import argparse\n'), ((820, 857), 'importlib.import_module', 'import_module', (["('models.' + model_name)"], {}), "('models... |
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
import time
busyTime = 10
idleTime = busyTime
while True:
start = time.clock()
while time.clock() - start < busyTime:
pass
time.sleep(busyTime / 1000)
| [
"time.sleep",
"time.clock"
] | [((117, 129), 'time.clock', 'time.clock', ([], {}), '()\n', (127, 129), False, 'import time\n'), ((190, 217), 'time.sleep', 'time.sleep', (['(busyTime / 1000)'], {}), '(busyTime / 1000)\n', (200, 217), False, 'import time\n'), ((140, 152), 'time.clock', 'time.clock', ([], {}), '()\n', (150, 152), False, 'import time\n'... |
"""
Module for working with named and anonymous maps
.. module:: carto.maps
:platform: Unix, Windows
:synopsis: Module for working with named and anonymous maps
.. moduleauthor:: <NAME> <<EMAIL>>
.. moduleauthor:: <NAME> <<EMAIL>>
"""
try:
from urllib.parse import urljoin
except ImportError:
from url... | [
"urlparse.urljoin"
] | [((2391, 2457), 'urlparse.urljoin', 'urljoin', (['base_url', '"""{template_id}/{layer}/attributes/{feature_id}"""'], {}), "(base_url, '{template_id}/{layer}/attributes/{feature_id}')\n", (2398, 2457), False, 'from urlparse import urljoin\n'), ((3802, 3842), 'urlparse.urljoin', 'urljoin', (['url', '"""?auth_token={auth_... |
from kv_client.kv_client import KVClient
def main():
kvSlave = KVClient(1, "127.0.0.1", 3456)
kvSlave.start()
if __name__ == "__main__":
main() | [
"kv_client.kv_client.KVClient"
] | [((68, 98), 'kv_client.kv_client.KVClient', 'KVClient', (['(1)', '"""127.0.0.1"""', '(3456)'], {}), "(1, '127.0.0.1', 3456)\n", (76, 98), False, 'from kv_client.kv_client import KVClient\n')] |
"""Python interfaces to DGL farthest point sampler."""
from dgl._ffi.base import DGLError
import numpy as np
from .._ffi.function import _init_api
from .. import backend as F
from .. import ndarray as nd
def _farthest_point_sampler(data, batch_size, sample_points, dist, start_idx, result):
r"""Farthest Point Samp... | [
"dgl._ffi.base.DGLError",
"numpy.unique"
] | [((3538, 3569), 'dgl._ffi.base.DGLError', 'DGLError', (['"""Find unmatched node"""'], {}), "('Find unmatched node')\n", (3546, 3569), False, 'from dgl._ffi.base import DGLError\n'), ((3813, 3858), 'numpy.unique', 'np.unique', (['node_label_np'], {'return_inverse': '(True)'}), '(node_label_np, return_inverse=True)\n', (... |
import shutil
import hashlib
from pathlib import Path
from typing import TextIO, BinaryIO, IO, Union
from datetime import datetime
from os.path import getmtime
from .low import ObservableDict
class Data:
def __init__(self, data_name: str, parent, bucket,
protected_parent_methods: Union[None, dict... | [
"hashlib.sha256",
"hashlib.md5",
"pathlib.Path",
"datetime.datetime.now",
"hashlib.sha1"
] | [((4878, 4891), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (4889, 4891), False, 'import hashlib\n'), ((5555, 5569), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (5567, 5569), False, 'import hashlib\n'), ((6244, 6260), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (6258, 6260), False, 'import hashlib\n... |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applica... | [
"tensorflow.python.keras.backend.get_value",
"tensorflow.python.keras.initializers.get",
"tensorflow.python.util.tf_export.keras_export",
"tensorflow.python.ops.gradients.gradients",
"tensorflow.python.framework.ops.init_scope",
"tensorflow.python.framework.ops.get_default_graph",
"tensorflow.python.ops... | [((2908, 2938), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (2925, 2938), False, 'import six\n'), ((2940, 2982), 'tensorflow.python.util.tf_export.keras_export', 'keras_export', (['"""keras.optimizers.Optimizer"""'], {}), "('keras.optimizers.Optimizer')\n", (2952, 2982), False, '... |
import boto3
import src.app as app
import csv
import psycopg2 as ps
import os
from dotenv import load_dotenv
load_dotenv()
dbname = os.environ["db"]
host = os.environ["host"]
port = os.environ["port"]
user = os.environ["user"]
password = os.environ["pass"]
connection = ps.connect(dbname=dbname,
... | [
"psycopg2.connect",
"src.app.start_app",
"boto3.client",
"dotenv.load_dotenv"
] | [((111, 124), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (122, 124), False, 'from dotenv import load_dotenv\n'), ((273, 350), 'psycopg2.connect', 'ps.connect', ([], {'dbname': 'dbname', 'host': 'host', 'port': 'port', 'user': 'user', 'password': 'password'}), '(dbname=dbname, host=host, port=port, user=user... |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2017-2021 Met Office.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions a... | [
"numpy.ones",
"improver.synthetic_data.set_up_test_cubes.set_up_probability_cube",
"pytest.raises",
"improver.precipitation_type.utilities.make_shower_condition_cube",
"numpy.arange"
] | [((2118, 2141), 'numpy.arange', 'np.arange', (['n_thresholds'], {}), '(n_thresholds)\n', (2127, 2141), True, 'import numpy as np\n'), ((2238, 2271), 'numpy.ones', 'np.ones', (['shape'], {'dtype': 'FLOAT_DTYPE'}), '(shape, dtype=FLOAT_DTYPE)\n', (2245, 2271), True, 'import numpy as np\n'), ((2283, 2427), 'improver.synth... |
# Copyright 2014 varnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import time
from feaas import storage
class Base(object):
def __init__(self, manager, interval, *locks):
self.manager = manager
self.st... | [
"feaas.storage.MultiLocker",
"time.sleep"
] | [((440, 473), 'feaas.storage.MultiLocker', 'storage.MultiLocker', (['self.storage'], {}), '(self.storage)\n', (459, 473), False, 'from feaas import storage\n'), ((663, 688), 'time.sleep', 'time.sleep', (['self.interval'], {}), '(self.interval)\n', (673, 688), False, 'import time\n')] |
# Copyright 2014 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law... | [
"os.path.join",
"logging.debug"
] | [((804, 881), 'os.path.join', 'os.path.join', (['debian_repository_baseurl', '"""dists"""', 'target.os_code_name', '"""main"""'], {}), "(debian_repository_baseurl, 'dists', target.os_code_name, 'main')\n", (816, 881), False, 'import os\n'), ((1127, 1177), 'logging.debug', 'logging.debug', (["('Reading file: %s' % cache... |
# Copyright 2022 ConvolutedDog (https://github.com/ConvolutedDog/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless ... | [
"torch.nn.ReLU",
"torch.nn.CrossEntropyLoss",
"torch.max",
"torch.exp",
"torch.pow",
"torch.eq",
"torch.sum",
"torch.flip",
"copy.deepcopy",
"torch.nn.functional.pad",
"os.walk",
"torch.arange",
"os.listdir",
"torch.matmul",
"torch.zeros_like",
"torch.autograd.Variable",
"torch.argma... | [((806, 821), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (819, 821), False, 'import torch\n'), ((1697, 1712), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1710, 1712), False, 'import torch\n'), ((2365, 2380), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2378, 2380), False, 'import torch\n'), ((... |
import cv2
import torch
import yaml
import imageio
import throttle
import numpy as np
import matplotlib.pyplot as plt
from argparse import ArgumentParser
from skimage.transform import resize
from scipy.spatial import ConvexHull
from modules.generator import OcclusionAwareGenerator
from modules.keypoint_detector import... | [
"sync_batchnorm.DataParallelWithCallback",
"numpy.sqrt",
"modules.generator.OcclusionAwareGenerator",
"argparse.ArgumentParser",
"torch.load",
"modules.keypoint_detector.KPDetector",
"throttle.wrap",
"yaml.load",
"cv2.imshow",
"numpy.array",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"torc... | [((2735, 2754), 'throttle.wrap', 'throttle.wrap', (['(1)', '(2)'], {}), '(1, 2)\n', (2748, 2754), False, 'import throttle\n'), ((1863, 1980), 'modules.generator.OcclusionAwareGenerator', 'OcclusionAwareGenerator', ([], {}), "(**config['model_params']['generator_params'], **\n config['model_params']['common_params'])... |
import numpy as np
from albumentations import (Compose, HorizontalFlip, VerticalFlip, Rotate, RandomRotate90,
ShiftScaleRotate, ElasticTransform,
GridDistortion, RandomSizedCrop, RandomCrop, CenterCrop,
RandomBrightnessContrast, HueSatu... | [
"albumentations.ShiftScaleRotate",
"albumentations.pytorch.ToTensorV2",
"albumentations.RandomBrightnessContrast",
"albumentations.GaussianBlur",
"albumentations.CoarseDropout",
"albumentations.GaussNoise",
"albumentations.HueSaturationValue",
"numpy.array",
"albumentations.Normalize",
"get_config... | [((737, 749), 'get_config.get_config', 'get_config', ([], {}), '()\n', (747, 749), False, 'from get_config import get_config\n'), ((758, 789), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (766, 789), True, 'import numpy as np\n'), ((797, 828), 'numpy.array', 'np.array', (['[0... |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Defines coordinate frames and ties them to data axes.
"""
from __future__ import absolute_import, division, unicode_literals, print_function
import numpy as np
from astropy import units as u
from astropy import utils as astutil
from astropy import coo... | [
"numpy.unique",
"numpy.isscalar",
"astropy.units.Unit",
"astropy.coordinates.SkyCoord",
"astropy.utils.isiterable"
] | [((8880, 8897), 'numpy.isscalar', 'np.isscalar', (['args'], {}), '(args)\n', (8891, 8897), True, 'import numpy as np\n'), ((2050, 2074), 'astropy.utils.isiterable', 'astutil.isiterable', (['unit'], {}), '(unit)\n', (2068, 2074), True, 'from astropy import utils as astutil\n'), ((7717, 7783), 'astropy.coordinates.SkyCoo... |
import lx
import modo
import select
import item
from run import run
class ChannelModifierUtils(object):
@classmethod
def attachModifierToItem(cls, modifierModoItem, hostModoItem):
"""
Allows for attaching modifier to locator type item.
Attached item will show up under the locator ... | [
"run.run",
"modo.Vector3",
"item.ItemUtils.addForwardGraphConnections",
"select.ItemSelection"
] | [((757, 846), 'item.ItemUtils.addForwardGraphConnections', 'item.ItemUtils.addForwardGraphConnections', (['modifierModoItem', 'hostModoItem', '"""chanMods"""'], {}), "(modifierModoItem, hostModoItem,\n 'chanMods')\n", (798, 846), False, 'import item\n'), ((2133, 2155), 'select.ItemSelection', 'select.ItemSelection',... |
from brownie import FundMe
from scripts.helpful_scripts import get_account
def fund():
fund_me = FundMe[-1]
account = get_account()
entrance_fee = fund_me.getEntranceFee()
print(f"entrance is {entrance_fee}")
print("funding..")
fund_me.fund({"from": account, "value": entrance_fee})
def withd... | [
"scripts.helpful_scripts.get_account"
] | [((128, 141), 'scripts.helpful_scripts.get_account', 'get_account', ([], {}), '()\n', (139, 141), False, 'from scripts.helpful_scripts import get_account\n'), ((366, 379), 'scripts.helpful_scripts.get_account', 'get_account', ([], {}), '()\n', (377, 379), False, 'from scripts.helpful_scripts import get_account\n')] |
# Um professor quer sortear um dos seus quatro alunos para apagar o quadro. Faça um programa que ajude ele, lendo o nome dos alunos e escrevendo na tela o nome do escolhido.
from random import choice
nome1 = input('Digite um nome: ')
nome2 = input('Digite outro nome: ')
nome3 = input('Digite mais um nome: ')
nome4 = ... | [
"random.choice"
] | [((395, 407), 'random.choice', 'choice', (['nome'], {}), '(nome)\n', (401, 407), False, 'from random import choice\n')] |
import datetime
import os
import shutil
import tempfile
from joblib import Parallel, delayed
from fastpic_upload import upload_file_to_fastpic
_n_jobs_for_upload = 20
_root_folders_set = (
'/path/to/folder',
)
_spoiler_for_each_file = True
def process_one_pic(result_key, pic_path, tmp_dir):
pic_url, pic_l... | [
"os.path.join",
"fastpic_upload.upload_file_to_fastpic",
"datetime.datetime.now",
"joblib.Parallel",
"tempfile.mkdtemp",
"shutil.rmtree",
"joblib.delayed",
"os.walk"
] | [((326, 367), 'fastpic_upload.upload_file_to_fastpic', 'upload_file_to_fastpic', (['pic_path', 'tmp_dir'], {}), '(pic_path, tmp_dir)\n', (348, 367), False, 'from fastpic_upload import upload_file_to_fastpic\n'), ((523, 543), 'os.walk', 'os.walk', (['folder_path'], {}), '(folder_path)\n', (530, 543), False, 'import os\n... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Dec 8, 2019
.. codeauthor: <NAME>
<<EMAIL>>
Index docs into ES
https://qbox.io/blog/building-an-elasticsearch-index-with-python
'''
from settings import *
import glob
import re
# n first characters for the doc preview
LIMIT_START = 100
txts_path = '%s... | [
"re.sub",
"elasticsearch.Elasticsearch",
"glob.glob"
] | [((1394, 1424), 'elasticsearch.Elasticsearch', 'Elasticsearch', ([], {'hosts': '[ES_HOST]'}), '(hosts=[ES_HOST])\n', (1407, 1424), False, 'from elasticsearch import Elasticsearch\n'), ((466, 501), 'glob.glob', 'glob.glob', (["(txts_path + '*_text.txt')"], {}), "(txts_path + '*_text.txt')\n", (475, 501), False, 'import ... |
from django.db import models
from django.db.models import Q
from django.contrib.auth.models import User
from django.urls import reverse
class ProjectQuerySet(models.QuerySet):
def projects_per_user(self, user):
return self.filter(
Q(project_owner=user.username)
)
class Projects(model... | [
"django.db.models.DateTimeField",
"django.db.models.Q",
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((349, 380), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (365, 380), False, 'from django.db import models\n'), ((401, 446), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'User', 'max_length': '(60)'}), '(default=User, max_length=60)\n', (417... |
import os
import unittest
from checkov.serverless.checks.function.aws.AdminPolicyDocument import check
from checkov.serverless.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestAdminPolicyDocument(unittest.TestCase):
def test_summary(self):
runner = Runner()
current_di... | [
"unittest.main",
"os.path.realpath",
"checkov.serverless.runner.Runner",
"checkov.runner_filter.RunnerFilter"
] | [((1148, 1163), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1161, 1163), False, 'import unittest\n'), ((293, 301), 'checkov.serverless.runner.Runner', 'Runner', ([], {}), '()\n', (299, 301), False, 'from checkov.serverless.runner import Runner\n'), ((340, 366), 'os.path.realpath', 'os.path.realpath', (['__file... |
#!/usr/bin/python3
# encoding: utf-8
import os
import sys
import getopt
import logging
import shutil
import psutil
from modules.com_run import ComGenerator
from modules.web_server import ListenServer
from modules.Wlisten_server import WListenServer
from modules.payload_builder_factory import PayloadBuilderFactory
from... | [
"common.utils.randomAlpha",
"pro_modules.payload_builders.containers.ContainerGenerator",
"common.utils.checkModuleExist",
"pro_core.payload_builder_factory_pro.PayloadBuilderFactoryPro",
"time.sleep",
"logging.exception",
"pro_modules.utilities.dcom_run.DcomGenerator",
"modules.com_run.ComGenerator",... | [((640, 674), 'common.utils.checkModuleExist', 'utils.checkModuleExist', (['"""pro_core"""'], {}), "('pro_core')\n", (662, 674), False, 'from common import utils, mp_session, help\n'), ((1132, 1138), 'colorama.init', 'init', ([], {}), '()\n', (1136, 1138), False, 'from colorama import init\n'), ((1173, 1191), 'common.h... |
"""
This module defines the ``geoplot`` coordinate reference system classes, wrappers on
``cartopy.crs`` objects meant to be used as parameters to the ``projection`` parameter of all
front-end ``geoplot`` outputs. For the list of Cartopy CRS objects this module derives from,
refer to http://scitools.org.uk/cartopy/docs... | [
"geopandas.GeoDataFrame"
] | [((4688, 4706), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', ([], {}), '()\n', (4704, 4706), True, 'import geopandas as gpd\n')] |
from common_fixtures import * # NOQA
import websocket as ws
import pytest
def get_logs(client):
hosts = client.list_host(kind='docker', removed_null=True)
assert len(hosts) > 0
in_log = random_str()
cmd = '/bin/bash -c "echo {}; sleep 2"'.format(in_log)
c = client.create_container(image=TEST_IMAG... | [
"websocket.create_connection",
"pytest.raises"
] | [((500, 555), 'websocket.create_connection', 'ws.create_connection', (["(logs.url + '?token=' + logs.token)"], {}), "(logs.url + '?token=' + logs.token)\n", (520, 555), True, 'import websocket as ws\n'), ((743, 767), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (756, 767), False, 'import pyte... |
'''ResNet using PSG in PyTorch.
For Pre-activation ResNet, see 'preact_resnet.py'.
Reference:
[1] <NAME>, <NAME>, <NAME>, <NAME>
Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
from numpy.lib.arraysetops import isin
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
... | [
"torch.nn.BatchNorm2d",
"torch.ones_like",
"torch.nn.Sequential",
"torch.nn.init._calculate_correct_fan",
"math.sqrt",
"models.masked_psg_seed_conv.PredictiveSeedConv2d",
"torch.nn.init.kaiming_normal_",
"torch.nn.init.kaiming_uniform_",
"torch.nn.init.xavier_normal_",
"torch.nn.init._calculate_fa... | [((952, 1468), 'models.masked_psg_seed_conv.PredictiveSeedConv2d', 'PredictiveSeedConv2d', (['in_planes', 'out_planes'], {'kernel_size': '(1)', 'stride': 'stride', 'padding': '(0)', 'bias': '(False)', 'num_bits': 'NUM_BITS', 'num_bits_weight': 'NUM_BITS_WEIGHT', 'num_bits_grad': 'NUM_BITS_GRAD', 'biprecision': 'BIPRECI... |
import cv2
import time
import socket
import threading
class Response(object):
def __init__(self):
pass
def recv(self, data):
pass
def pop(self):
pass
def empty(self):
pass
class Command(Response):
def __init__(self):
super(Command, self).__init__()
... | [
"socket.socket",
"threading.RLock",
"cv2.VideoCapture",
"threading.Thread",
"time.time"
] | [((365, 382), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (380, 382), False, 'import threading\n'), ((830, 847), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (845, 847), False, 'import threading\n'), ((1311, 1359), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '... |
#!/usr/bin/env python3
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
try:
tf.config.experimental.set_memory_growth(physical_devices[0], True)
except:
# Invalid device or cannot modify virtual devices once initialized.
pass
import numpy as np
import os, time, csv
import ... | [
"tensorflow.train.Checkpoint",
"tensorflow.config.list_physical_devices",
"tensorflow.nn.softmax",
"umap.UMAP",
"net.FeatureBlock",
"matplotlib.pyplot.close",
"net.FontData",
"net.SimpleDecoderBlock",
"numpy.concatenate",
"tensorflow.train.CheckpointManager",
"matplotlib.use",
"numpy.argmax",
... | [((67, 105), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (98, 105), True, 'import tensorflow as tf\n'), ((355, 376), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (369, 376), False, 'import matplotlib\n'), ((115, 182), 'tensorfl... |
from __future__ import absolute_import, division, print_function
import logging
import docker
import tempfile
import requests
from requests.exceptions import RequestException
import json
import pprint
import time
import re
import os
import tarfile
import sys
from cloudpickle import CloudPickler
import pickle
import num... | [
"logging.basicConfig",
"logging.getLogger",
"re.compile",
"tarfile.TarFile",
"docker.from_env",
"tempfile.NamedTemporaryFile",
"tarfile.TarInfo"
] | [((1180, 1302), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(levelname)-8s %(message)s"""', 'datefmt': '"""%y-%m-%d:%H:%M:%S"""', 'level': 'logging.INFO'}), "(format='%(asctime)s %(levelname)-8s %(message)s',\n datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO)\n", (1199, 1302), Fals... |
from keras import layers
# Single-layer LSTM model for next-character prediction
model = keras.models.Sequential()
model.add(layers.LSTM(128, input_shape=(maxlen, len(chars))))
model.add(layers.Dense(len(chars), activation='softmax'))
# Model compilation configuration
optimizer = keras.optimizers.RMSprop(lr=0.01)
mod... | [
"sys.stdout.write"
] | [((1298, 1330), 'sys.stdout.write', 'sys.stdout.write', (['generated_text'], {}), '(generated_text)\n', (1314, 1330), False, 'import sys\n'), ((1884, 1911), 'sys.stdout.write', 'sys.stdout.write', (['next_char'], {}), '(next_char)\n', (1900, 1911), False, 'import sys\n')] |
"""
Example of ModECI MDF - Testing state variables
"""
from modeci_mdf.mdf import *
import sys
def main():
mod = Model(id="States")
mod_graph = Graph(id="state_example")
mod.graphs.append(mod_graph)
## Counter node
counter_node = Node(id="counter_node")
p1 = Parameter(id="increment", v... | [
"matplotlib.pyplot.plot",
"matplotlib.pyplot.show",
"modeci_mdf.execution_engine.EvaluableGraph"
] | [((1568, 1602), 'modeci_mdf.execution_engine.EvaluableGraph', 'EvaluableGraph', (['mod_graph', 'verbose'], {}), '(mod_graph, verbose)\n', (1582, 1602), False, 'from modeci_mdf.execution_engine import EvaluableGraph\n'), ((2183, 2201), 'matplotlib.pyplot.plot', 'plt.plot', (['times', 's'], {}), '(times, s)\n', (2191, 22... |
# -*- coding: utf-8 -*-
'''
Tests for the file state
'''
# Import python libs
from __future__ import absolute_import
import errno
import os
import textwrap
import tempfile
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.paths import TMP_STATE_TREE
from tests.support.mixins impor... | [
"textwrap.dedent",
"os.close",
"os.path.join",
"tempfile.gettempdir",
"tempfile.mkstemp",
"os.remove"
] | [((1258, 1302), 'os.path.join', 'os.path.join', (['TMP_STATE_TREE', 'state_filename'], {}), '(TMP_STATE_TREE, state_filename)\n', (1270, 1302), False, 'import os\n'), ((1386, 1404), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (1402, 1404), False, 'import tempfile\n'), ((1628, 1646), 'tempfile.mkstemp', 't... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2018 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-... | [
"tiledb.SparseArray",
"tiledb.DenseArray",
"numpy.empty",
"numpy.ascontiguousarray"
] | [((1186, 1231), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['ctx[chunk.op.input.key]'], {}), '(ctx[chunk.op.input.key])\n', (1206, 1231), True, 'import numpy as np\n'), ((1650, 1696), 'numpy.empty', 'np.empty', (['((0,) * chunk.ndim)'], {'dtype': 'chunk.dtype'}), '((0,) * chunk.ndim, dtype=chunk.dtype)\n', (16... |
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompa... | [
"awses_test_vectors.manifests.full_message.decrypt.MessageDecryptionManifest",
"attr.validators.instance_of",
"awses_test_vectors.manifests.full_message.encrypt.MessageEncryptionTestScenario.from_scenario",
"copy.copy",
"awses_test_vectors.manifests.master_key.MasterKeySpec.from_scenario",
"awses_test_vec... | [((5257, 5286), 'aws_encryption_sdk.caches.local.LocalCryptoMaterialsCache', 'LocalCryptoMaterialsCache', (['(10)'], {}), '(10)\n', (5282, 5286), False, 'from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache\n'), ((5309, 5438), 'aws_encryption_sdk.materials_managers.caching.CachingCryptoMaterialsManager... |
import torch
import torchtestcase
from neural_spline_flows.nde.transforms import base
class TransformTest(torchtestcase.TorchTestCase):
"""Base test for all transforms."""
def assert_tensor_is_good(self, tensor, shape=None):
self.assertIsInstance(tensor, torch.Tensor)
self.assertFalse(torch.... | [
"torch.isnan",
"neural_spline_flows.nde.transforms.base.InverseTransform",
"neural_spline_flows.nde.transforms.base.CompositeTransform",
"torch.equal",
"torch.isinf",
"torch.Size",
"torch.zeros"
] | [((576, 608), 'neural_spline_flows.nde.transforms.base.InverseTransform', 'base.InverseTransform', (['transform'], {}), '(transform)\n', (597, 608), False, 'from neural_spline_flows.nde.transforms import base\n'), ((628, 673), 'neural_spline_flows.nde.transforms.base.CompositeTransform', 'base.CompositeTransform', (['[... |
import os
database_url = os.environ.get('DATABASE_URL')
| [
"os.environ.get"
] | [((26, 56), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (40, 56), False, 'import os\n')] |
from datetime import date
from six import BytesIO, binary_type, u
from six.moves.urllib.parse import parse_qsl, urlencode
from unittest2 import TestCase
import mock
from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI
from authorizesauce.data import Address, CreditCard
from authorizesauce.e... | [
"mock.patch",
"authorizesauce.data.Address",
"six.moves.urllib.parse.parse_qsl",
"six.moves.urllib.parse.urlencode",
"authorizesauce.apis.transaction.TransactionAPI",
"datetime.date.today",
"six.BytesIO.__init__",
"authorizesauce.data.CreditCard",
"six.u"
] | [((2990, 3043), 'mock.patch', 'mock.patch', (['"""authorizesauce.apis.transaction.urlopen"""'], {}), "('authorizesauce.apis.transaction.urlopen')\n", (3000, 3043), False, 'import mock\n'), ((3435, 3488), 'mock.patch', 'mock.patch', (['"""authorizesauce.apis.transaction.urlopen"""'], {}), "('authorizesauce.apis.transact... |
import torch
from ....data.utils.boxes import centroids2corners, iou
def matching_strategy(targets, dboxes, **kwargs):
"""
:param targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels)
:param dboxes: shape is (default boxes num, 4)
IMPORTANT: Note that means (cx, cy, w, h)
:param kw... | [
"torch.logical_not",
"torch.empty"
] | [((1129, 1198), 'torch.empty', 'torch.empty', (['(batch_num, dboxes_num)'], {'device': 'device', 'dtype': 'torch.bool'}), '((batch_num, dboxes_num), device=device, dtype=torch.bool)\n', (1140, 1198), False, 'import torch\n'), ((1200, 1266), 'torch.empty', 'torch.empty', (['(batch_num, dboxes_num, 4 + class_num)'], {'de... |
"""Routines for numerical differentiation."""
from __future__ import division
import numpy as np
from numpy.linalg import norm
from scipy.sparse.linalg import LinearOperator
from ..sparse import issparse, csc_matrix, csr_matrix, coo_matrix, find
from ._group_columns import group_dense, group_sparse
EPS = np.finfo(n... | [
"scipy.sparse.linalg.LinearOperator",
"numpy.hstack",
"numpy.equal",
"numpy.linalg.norm",
"numpy.random.RandomState",
"numpy.atleast_2d",
"numpy.isscalar",
"numpy.asarray",
"numpy.max",
"numpy.resize",
"numpy.empty",
"numpy.maximum",
"numpy.isinf",
"numpy.abs",
"numpy.any",
"numpy.nonz... | [((310, 330), 'numpy.finfo', 'np.finfo', (['np.float64'], {}), '(np.float64)\n', (318, 330), True, 'import numpy as np\n'), ((1858, 1898), 'numpy.all', 'np.all', (['((lb == -np.inf) & (ub == np.inf))'], {}), '((lb == -np.inf) & (ub == np.inf))\n', (1864, 1898), True, 'import numpy as np\n'), ((13340, 13357), 'numpy.atl... |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to i... | [
"tests.base.EvalModelTemplate.load_from_checkpoint",
"torch.nn.CrossEntropyLoss",
"fsspec.implementations.local.LocalFileSystem",
"pickle.dumps",
"pytorch_lightning.utilities.is_picklable",
"pytorch_lightning.Trainer",
"argparse.Namespace",
"pickle.loads",
"os.listdir",
"omegaconf.OmegaConf.create... | [((2764, 2834), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""cls"""', '[SaveHparamsModel, AssignHparamsModel]'], {}), "('cls', [SaveHparamsModel, AssignHparamsModel])\n", (2787, 2834), False, 'import pytest\n'), ((3026, 3096), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""cls"""', '[SaveHpa... |
import unittest
from coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum
class TestEthereum(unittest.TestCase):
"""test of the bitcoin conversion from private key to wif"""
def setUp(self):
self.test_add_vector = [("03cb3e5f30245658e1e3615... | [
"coinplus_solo_redeem.common.address_from_publickey_ethereum"
] | [((1486, 1528), 'coinplus_solo_redeem.common.address_from_publickey_ethereum', 'address_from_publickey_ethereum', (['publickey'], {}), '(publickey)\n', (1517, 1528), False, 'from coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum\n')] |
import ast
import operator
import re
from collections import OrderedDict
from functools import partial
from ..cache import Cache
from ..exceptions import PluginError, NoStreamsError
from ..options import Options
# FIXME: This is a crude attempt at making a bitrate's
# weight end up similar to the weight of a resolut... | [
"collections.OrderedDict",
"re.match",
"ast.literal_eval",
"functools.partial",
"re.findall"
] | [((1082, 1159), 're.match', 're.match', (['"""^(\\\\d+)(k|p)?(\\\\d+)?(\\\\+)?(?:_(\\\\d+)k)?(?:_(alt)(\\\\d)?)?$"""', 'stream'], {}), "('^(\\\\d+)(k|p)?(\\\\d+)?(\\\\+)?(?:_(\\\\d+)k)?(?:_(alt)(\\\\d)?)?$', stream)\n", (1090, 1159), False, 'import re\n'), ((2624, 2678), 're.match', 're.match', (['"""(?P<op><=|>=|<|>)?... |
# This file is part of the Reference Data Repository (refdata).
#
# Copyright (C) 2021 New York University.
#
# refdata is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.
"""Fixtures for testing the command-line interface."""
import os
i... | [
"refdata.db.DB",
"os.path.join",
"click.testing.CliRunner"
] | [((631, 663), 'os.path.join', 'os.path.join', (['basedir', '"""test.db"""'], {}), "(basedir, 'test.db')\n", (643, 663), False, 'import os\n'), ((843, 854), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (852, 854), False, 'from click.testing import CliRunner\n'), ((669, 696), 'refdata.db.DB', 'DB', ([], {'co... |
#!/usr/bin/python3
# ***************************************************************
# Copyright (c) 2022 Jittor. All Rights Reserved.
# Maintainers:
# <NAME> <<EMAIL>>.
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***********... | [
"os.system",
"os.path.basename"
] | [((899, 921), 'os.path.basename', 'os.path.basename', (['name'], {}), '(name)\n', (915, 921), False, 'import os\n'), ((663, 677), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (672, 677), False, 'import os\n')] |
from setuptools import setup
PLUGIN_NAME = "papermill"
microlib_name = f"flytekitplugins-{PLUGIN_NAME}"
plugin_requires = [
"flytekit>=0.16.0b0,<1.0.0",
"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0",
"papermill>=1.2.0",
"nbconvert>=6.0.7",
"ipykernel>=5.0.0",
]
__version__ = "0.0.0+develop... | [
"setuptools.setup"
] | [((323, 1153), 'setuptools.setup', 'setup', ([], {'name': 'microlib_name', 'version': '__version__', 'author': '"""flyteorg"""', 'author_email': '"""<EMAIL>"""', 'description': '"""This is the flytekit papermill plugin"""', 'namespace_packages': "['flytekitplugins']", 'packages': "[f'flytekitplugins.{PLUGIN_NAME}']", '... |
import pyttsx3
import datetime
import speech_recognition as sr
import wikipedia
import webbrowser
import os
import smtplib
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[0].id)
# To change the voice to female change 0 to 1.
def speak(au... | [
"smtplib.SMTP",
"os.listdir",
"pyttsx3.init",
"os.path.join",
"webbrowser.open",
"speech_recognition.Recognizer",
"datetime.datetime.now",
"speech_recognition.Microphone",
"os.startfile",
"wikipedia.summary"
] | [((145, 166), 'pyttsx3.init', 'pyttsx3.init', (['"""sapi5"""'], {}), "('sapi5')\n", (157, 166), False, 'import pyttsx3\n'), ((495, 510), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (508, 510), True, 'import speech_recognition as sr\n'), ((961, 995), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.gma... |
"""
Ory Kratos API
Documentation for all public and administrative Ory Kratos APIs. Public and administrative APIs are exposed on different ports. Public APIs can face the public internet without any protection while administrative APIs should never be exposed without prior authorization. To protect the admini... | [
"unittest.main",
"ory_kratos_client.api.v0alpha1_api.V0alpha1Api"
] | [((7305, 7320), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7318, 7320), False, 'import unittest\n'), ((842, 855), 'ory_kratos_client.api.v0alpha1_api.V0alpha1Api', 'V0alpha1Api', ([], {}), '()\n', (853, 855), False, 'from ory_kratos_client.api.v0alpha1_api import V0alpha1Api\n')] |
"""The present code is the Version 1.0 of the RCNN approach to perform MPS
in 3D for categorical variables. It has been developed by <NAME> and <NAME> in the
Geometallurygical Group at Queen's University as part of a PhD program.
The code is not free of bugs but running end-to-end.
Any comments and further improv... | [
"External_Functions_3D.Grid",
"numpy.around",
"gc.collect",
"External_Functions_3D.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D",
"time.time"
] | [((926, 937), 'time.time', 'time.time', ([], {}), '()\n', (935, 937), False, 'import time\n'), ((3406, 3510), 'External_Functions_3D.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D', 'fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D', ([], {'HyperPar': 'HyperPar', 'LocModel': 'LocModel'}), '(HyperPar=\n... |
import logging
from typing import Dict
from django.http import HttpRequest
logger = logging.getLogger(__name__)
class FeatureFlagProvider:
def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict = None):
raise NotImplementedError("You must override FeatureFlagProvider.is_fe... | [
"logging.getLogger",
"django.conf.settings.FEATURE_FLAG_PROVIDER.is_feature_enabled"
] | [((86, 113), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (103, 113), False, 'import logging\n'), ((946, 1069), 'django.conf.settings.FEATURE_FLAG_PROVIDER.is_feature_enabled', 'settings.FEATURE_FLAG_PROVIDER.is_feature_enabled', ([], {'feature_name': 'feature_name', 'user_id': '"""dont... |
from collections import OrderedDict
import torch
import torch.nn as nn
from torch_geometric.data.batch import Batch
class GNN(nn.Module):
def __init__(self, mp_steps, **config):
super().__init__()
self.mp_steps = mp_steps
self.update_fns = self.assign_update_fns()
self.readout_fns... | [
"torch.bincount"
] | [((642, 669), 'torch.bincount', 'torch.bincount', (['batch.batch'], {}), '(batch.batch)\n', (656, 669), False, 'import torch\n')] |
"""
SVG export test
"""
import test
import pyqtgraph as pg
app = pg.mkQApp()
class SVGTest(test.TestCase):
#def test_plotscene(self):
#pg.setConfigOption('foreground', (0,0,0))
#w = pg.GraphicsWindow()
#w.show()
#p1 = w.addPlot()
#p2 = w.addPlot()
#p1.plot([1... | [
"pyqtgraph.QtGui.QGraphicsRectItem",
"pyqtgraph.ItemGroup",
"pyqtgraph.mkQApp",
"test.unittest.main",
"pyqtgraph.exporters.SVGExporter.SVGExporter",
"pyqtgraph.mkPen",
"pyqtgraph.QtGui.QGraphicsScene"
] | [((65, 76), 'pyqtgraph.mkQApp', 'pg.mkQApp', ([], {}), '()\n', (74, 76), True, 'import pyqtgraph as pg\n'), ((2116, 2136), 'test.unittest.main', 'test.unittest.main', ([], {}), '()\n', (2134, 2136), False, 'import test\n'), ((704, 729), 'pyqtgraph.QtGui.QGraphicsScene', 'pg.QtGui.QGraphicsScene', ([], {}), '()\n', (727... |
"""This file contains functions for loading and preprocessing pianoroll data.
"""
import logging
import numpy as np
import tensorflow.compat.v1 as tf
from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE
LOGGER = logging.getLogger(__name__)
# --- Data loader ----------------------------------------------------... | [
"logging.getLogger",
"numpy.issubdtype",
"numpy.random.randint",
"numpy.zeros",
"tensorflow.compat.v1.py_func",
"SharedArray.attach",
"numpy.load"
] | [((221, 248), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (238, 248), False, 'import logging\n'), ((437, 454), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (444, 454), True, 'import numpy as np\n'), ((1379, 1403), 'numpy.random.randint', 'np.random.randint', (['(-5)', '... |
import torch
import torch.nn as nn
import torch.nn.functional as F
from .kernels import (
get_spatial_gradient_kernel2d,
get_spatial_gradient_kernel3d,
normalize_kernel2d
)
def spatial_gradient(input, mode='sobel', order=1, normalized=True):
"""
Computes the first order image derivative in bo... | [
"torch.nn.functional.pad",
"torch.sqrt",
"torch.nn.functional.conv3d"
] | [((2916, 2951), 'torch.sqrt', 'torch.sqrt', (['(gx * gx + gy * gy + eps)'], {}), '(gx * gx + gy * gy + eps)\n', (2926, 2951), False, 'import torch\n'), ((1255, 1299), 'torch.nn.functional.conv3d', 'F.conv3d', (['padded_inp', 'kernel_flip'], {'padding': '(0)'}), '(padded_inp, kernel_flip, padding=0)\n', (1263, 1299), Tr... |
# ******************************************************************
# |docname| - Provide `docker_tools.py` as the script `docker-tools`
# ******************************************************************
from setuptools import setup
setup(
name="runestone-docker-tools",
version="0.1",
install_requires=[... | [
"setuptools.setup"
] | [((237, 397), 'setuptools.setup', 'setup', ([], {'name': '"""runestone-docker-tools"""', 'version': '"""0.1"""', 'install_requires': "['click']", 'entry_points': "{'console_scripts': ['docker-tools = docker_tools:cli']}"}), "(name='runestone-docker-tools', version='0.1', install_requires=[\n 'click'], entry_points={... |
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import matplotlib.pyplot as plt
# local model
import sys
sys.path.append("../network")
import Coral
from lstm import LSTMHardSigmoid
from AdaBN import AdaBN
sys.path.append("../network/Aut... | [
"torch.nn.Conv1d",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.CrossEntropyLoss",
"torch.nn.init.constant_",
"torch.max",
"torch.cuda.device_count",
"torch.cuda.is_available",
"sys.path.append",
"torch.nn.MaxPool1d",
"os.path.exists",
"Coral.CORAL_torch",
"torch.nn.init.xavier_uniform_",
... | [((189, 218), 'sys.path.append', 'sys.path.append', (['"""../network"""'], {}), "('../network')\n", (204, 218), False, 'import sys\n'), ((289, 330), 'sys.path.append', 'sys.path.append', (['"""../network/AutoEncoder"""'], {}), "('../network/AutoEncoder')\n", (304, 330), False, 'import sys\n'), ((13068, 13093), 'torch.c... |
import asyncio, json
from config import Config
from soundpad_manager import SoundpadManager
from version import BRIDGE_VERSION
import websockets
from sanic.log import logger
# yes I know that it's very lazy to run a separate WS and HTTP server, when both could be run on the same port
# I don't like sanics ... | [
"json.loads",
"json.dumps",
"sanic.log.logger.info",
"websockets.serve",
"asyncio.get_event_loop"
] | [((886, 944), 'sanic.log.logger.info', 'logger.info', (['f"""Websocket server is running on port {port}"""'], {}), "(f'Websocket server is running on port {port}')\n", (897, 944), False, 'from sanic.log import logger\n'), ((3359, 3418), 'json.dumps', 'json.dumps', (["{'type': 'event', 'event': event, 'data': data}"], {... |
from django.urls import path
from what_can_i_cook.views import WCICFilterView, WCICResultView
app_name = "wcic"
urlpatterns = [
path("", WCICFilterView.as_view(), name="wcic-start"),
path("results/", WCICResultView.as_view(), name="wcic-results"),
]
| [
"what_can_i_cook.views.WCICResultView.as_view",
"what_can_i_cook.views.WCICFilterView.as_view"
] | [((145, 169), 'what_can_i_cook.views.WCICFilterView.as_view', 'WCICFilterView.as_view', ([], {}), '()\n', (167, 169), False, 'from what_can_i_cook.views import WCICFilterView, WCICResultView\n'), ((212, 236), 'what_can_i_cook.views.WCICResultView.as_view', 'WCICResultView.as_view', ([], {}), '()\n', (234, 236), False, ... |
from setuptools import setup, find_packages
from distutils.extension import Extension
from distutils.command.sdist import sdist
try:
from Cython.Build import cythonize
USE_CYTHON = True
except ImportError:
USE_CYTHON = False
ext = 'pyx' if USE_CYTHON else 'c'
extensions = [Extension(
'dsigma.precomput... | [
"Cython.Build.cythonize",
"distutils.command.sdist.sdist.run",
"setuptools.find_packages"
] | [((462, 483), 'Cython.Build.cythonize', 'cythonize', (['extensions'], {}), '(extensions)\n', (471, 483), False, 'from Cython.Build import cythonize\n'), ((548, 591), 'Cython.Build.cythonize', 'cythonize', (["['dsigma/precompute_engine.pyx']"], {}), "(['dsigma/precompute_engine.pyx'])\n", (557, 591), False, 'from Cython... |