content stringlengths 5 1.05M |
|---|
import uuid #python uuid package to give each uploaded image a unique name/id
import os #as using os.path to provide a valid path for our file destination
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, PermissionsMixin
from django.conf import settings #as we want... |
import asyncio
from contextlib import suppress
from os.path import dirname
from os.path import join
import pyperf # type: ignore
from dipdup.config import DipDupConfig
from dipdup.dipdup import DipDup
from dipdup.test import with_operation_index_fuzzer
def add_cmdline_args(cmd, args):
cmd += ['--quiet']
runn... |
from gym.spaces import Box, Dict, Discrete, MultiDiscrete, Tuple
import numpy as np
import unittest
import ray
from ray.tune import register_env
from ray.rllib.algorithms.qmix import QMixConfig
from ray.rllib.env.multi_agent_env import MultiAgentEnv
class AvailActionsTestEnv(MultiAgentEnv):
num_actions = 10
... |
from enum import Enum
from glTF_editor.common.utils_py import \
UnicodeType
from glTF_editor.common.data_serializer import \
serializer
class Asset(object):
"""
...
"""
def __init__(self,
version=None,
copyright=None,
generator=None,
... |
# Copyright 2014-2016 Insight Software Consortium.
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
"""
Implementation details
"""
import re
class parser_t(object):
"""implementation details"""
... |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import os.path
import shutil
import llnl.util.tty as tty
from llnl.util.filesystem... |
from tkinter import *
import ged_lib as gl
import tag_records as tag
import write_ged_file as wgf
# When True, build and run the Tk button UI below; handlers can also be
# called directly when no UI is wanted.
want_ui = True
def clicked1():
    """Button handler: run gl.process_ged_file()."""
    gl.process_ged_file()
def clicked2():
    """Button handler: run tag.tag_ancestors_families()."""
    tag.tag_ancestors_families()
def clicked3():
    """Button handler: run wgf.write_ged_file()."""
    wgf.write_ged_file()
def exit():
    # NOTE(review): shadows the builtin exit(); destroys the Tk root window
    # (`root` is created further down in this file, outside this view).
    root.destroy()
if want_ui:
... |
import re
from sphinx.ext.autodoc import (
ALL, Documenter,
bool_option, members_option, members_set_option)
from .domain import SolidityDomain
from .sourceregistry import SolidityObject
from sphinx.util.logging import getLogger
logger = getLogger(__name__)
class SolidityObjectDocumenter(Documenter):
dom... |
import pyodbc
import sqlalchemy as sa
import sqlalchemy.dialects.mssql as mssql
import ibis.common.exceptions as com
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.backends.base_sqlalchemy.alchemy as alch
# used for literal translate
from ibis.backends.base_sqlalchemy.alchemy import f... |
import math
class Config:
# input dim
window_width = 800
window_height = 800
intruder_size = 0
EPISODES = 1000
G = 9.8
tick = 30
scale = 30
# distance param
minimum_separation = 555 / scale
NMAC_dist = 150 / scale
horizon_dist = 4000 / scale
initial_min_dist = 3000... |
import unittest
from kiss_headers import Header, Headers, lock_output_type, parse_it
from kiss_headers.utils import decode_partials
RAW_HEADERS = """accept-ch: DPR
accept-ch-lifetime: 2592000
alt-svc: quic=":443"; ma=2592000; v="46,43", h3-Q050=":443"; ma=2592000, h3-Q049=":443"; ma=2592000, h3-Q048=":443"; ma=259200... |
from flask import request
from flask_restful import Resource
from src.models.alert import AlertModel
from src.models.user import User
from src.utils.checkauth import authrequired
class Alert(Resource):
@authrequired
def put(self):
data = request.get_json()
if data is None:
return {... |
import json
import logging
import re
from datetime import datetime, timedelta
from uuid import uuid4
import requests
import pytz
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, ForeignKey
from sqlalchemy import Integer, Text, Boolean, String, Enum, DateTime, JSON
from sqlalchemy import asc
fro... |
'''
Created on 18 Jul 2017
@author: spotnuru
'''
from cassandra.cluster import Cluster

# Connect to the local Cassandra node and open the 'task1' keyspace.
cassandra_cluster = Cluster(['127.0.0.1'])
session = cassandra_cluster.connect('task1')

# Create the table holding watch records (id -> name).
session.execute("CREATE TABLE watches(id int PRIMARY KEY, name text);")
print("Table Created")
from pathlib import Path
from mseg_semantic.utils.dataset import SemData, make_dataset
TEST_DATA_ROOT = Path(__file__).resolve().parent / "test_data"
def test_make_dataset() -> None:
"""Ensure make_dataset() returns the proper outputs
"""
split = "train"
data_root = "/home/dummy_data_root"
data_list_fpath = st... |
import _ast
from daipecore.lineage.DecoratorParserInterface import DecoratorParserInterface
from pysparkbundle.lineage.PathWriter import PathWriter
class PathWriterParser(DecoratorParserInterface):
def __init__(self, name: str, mode: str):
self.__name = name
self.__mode = mode
def parse(self,... |
"""
Indico Request Handler
"""
import json, traceback
import tornado.web
from intercombot.error import IndicoError, RouteNotFound, ServerError
from intercombot.utils import LOGGER
class JSONEncoder(json.JSONEncoder):
    """json.JSONEncoder subclass used by the request handlers.

    NOTE(review): default() only defers to the base implementation, which
    raises TypeError for unserializable objects — this override adds no
    behavior; presumably it exists as an extension point. Confirm intent.
    """
    def default(self, o):
        # Delegate to the base class (raises TypeError for objects the
        # json module cannot serialize).
        return json.JSONEncoder.default(self, o)
class IndicoHandler(torn... |
import torch as th
from torch.optim.optimizer import Optimizer, required
def normalize_param(W):
    """Return W scaled to unit L2 norm.

    The norm is floored at 1e-12 so an all-zero tensor does not cause a
    division by zero (it is returned unchanged, still all zeros).
    """
    l2 = W.norm(2).clamp(min=1e-12)
    return W / l2
def to_vector(tensors):
    """Flatten a list of parameters/gradients into one detached 1-D vector."""
    flat_parts = [t.view(-1) for t in tensors]
    return th.cat(flat_parts).detach()
def from_vector(tensors, vector)... |
#
# Copyright (c) 2021 Red Hat, Inc.
# This program and the accompanying materials are made
# available under the terms of the Eclipse Public License 2.0
# which is available at https://www.eclipse.org/legal/epl-2.0/
#
# SPDX-License-Identifier: EPL-2.0
#
# Contributors:
# Red Hat, Inc. - initial API and implementati... |
from ..base import ShopifyResource
from .usage_charge import UsageCharge
def _get_first_by_status(resources, status):
    """Return the first resource whose .status equals *status*, or None."""
    matches = (candidate for candidate in resources if candidate.status == status)
    return next(matches, None)
class RecurringApplicationCharge(ShopifyResource):
def usage_charges(self):
... |
import numpy
from .observable import BaseObservable
def get_flat_local_connections_log_values(wave_function, local_connections, all_use_conn):
local_connections_reshape = numpy.moveaxis(local_connections, 1, 0).reshape((-1,)
+ local... |
"""VIMS calibration data module.
Source: https://pds-imaging.jpl.nasa.gov/data/cassini/cassini_orbiter/vims-calibration-files/vims-pipeline-RC19-files-2018/ # noqa:501
"""
import numpy as np
from .vars import DATA, RC
from ..interp import lin_interp
from ..pds.times import dyear
class VIMSCalibData(type):
""... |
# coding=utf-8
#
levels={
'SXP':0,
'USE':0,
'MGR':0,
'MM3':0,
'CCK':0,
}
class Config(object):
preprocessorPrint=0
realtimeImportPrint=0
realtimeIssuePrint=0
realtimeUSE=0
realtimeCheckers=0
modules={
'USE':'modelscript.use.engine',
'MGR':'modelscript.use.engine.merger'... |
from collections import OrderedDict
from feature.feature import *
class FeatureMeta:
def __init__(self):
super().__init__()
self.continuous_feats = OrderedDict()
self.categorical_feats = OrderedDict()
self.multi_category_feats = OrderedDict()
self.feat_dict = {}
def a... |
"""
requests patcher module.
"""
from __future__ import absolute_import
import wrapt
from epsagon.modules.general_wrapper import wrapper
from ..events.urllib import UrllibEventFactory
def _wrapper(wrapped, instance, args, kwargs):
"""
General wrapper for requests instrumentation.
:param wrapped: wrapt's ... |
"""Tests for treadmill.runtime.linux.image.docker.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
import shutil
import tempfile
import unittest
import mock
# Disable W0611: Unused import
imp... |
import urllib.parse
from datetime import timedelta
import pytest
from fastapi import status
from fastapi.testclient import TestClient
from api import db
from api.services.deck import create_snapshot_for_deck
from api.utils.auth import create_access_token
from ..utils import create_admin_token, create_user_token
from... |
"""
[2015-10-09] Challenge #235 [Hard] Contiguous Chain Variation
https://www.reddit.com/r/dailyprogrammer/comments/3o36b6/20151009_challenge_235_hard_contiguous_chain/
# Description
Based on [Challenge #227 Contiguous chains](http://redd.it/3gpjn3)
... but with a chain meaning 1 *continuous* strand, where each link ... |
from tensorflow.python.compiler.tensorrt import trt_convert as trt
conversion_params = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(
precision_mode=trt.TrtPrecisionMode.FP16,
max_workspace_size_bytes=8000000000)
converter = trt.TrtGraphConverterV2(
input_saved_model_dir='checkpoints/LPD_mobilenet_v2_keras_pr... |
""" Leetcode 322 - Coin Change
https://leetcode.com/problems/coin-change/
1. Time: O(n*amount) Memory: O(amount)
2. Time: O(n*amount) Memory: O(amount)
3. Time: Pending...
"""
from typing import List
class Solution1:
""" 1. Dynamic Programming
Borrow from: https://leetcode.com/problems/coin-change/discus... |
# %%
'''Example script for using differentiable WDFs to determine the parameters of an RC lowpass filter'''
import sys
sys.path.insert(0, "../lib")
import numpy as np
import tf_wdf as wdf
from tf_wdf import tf
import tqdm as tqdm
import matplotlib.pyplot as plt
import audio_dspy as adsp
import scipy.signal as signal
... |
#! /usr/bin/env python3
# Released to the public domain, by Tim Peters, 03 October 2000.
"""reindent [-d][-r][-v] [ path ... ]
-d (--dryrun) Dry run. Analyze, but don't make any changes to, files.
-r (--recurse) Recurse. Search dla all .py files w subdirectories too.
-n (--nobackup) No backup. Does nie make a... |
class お布団(object):
    """Context manager modelling a futon ("お布団").

    Prints a message on construction, on entering (falling asleep) and on
    exiting (waking up); exceptions raised inside the block are swallowed.
    """
    def __init__(self):
        print("眠いよ")  # "I'm sleepy"
    def __enter__(self):
        print("入眠")  # "falling asleep"
        return self
    def __exit__(self, type_, value, traceback):
        # Dump whatever exception info (if any) ended the block, then wake up.
        print(type_, value, traceback)
        print("起床")  # "waking up"
        # Returning True suppresses any exception raised inside the block.
        return True
    def 状態確認(self):
        # 状態確認 = "status check"
        print("オフトニアなうZzz")
def main(... |
def dedup_list(list):
    """Return a new list with duplicates removed, keeping first occurrences.

    Uses a set for O(1) membership tests on hashable items (the original
    scanned the output list each iteration, which is O(n^2)); unhashable
    items (lists, dicts, sets) fall back to the original linear scan, so
    behavior is preserved for every input the original accepted.

    Note: the parameter name shadows the builtin ``list``; it is kept for
    backward compatibility with keyword callers.
    """
    new_list = []
    seen = set()             # fast membership for hashable items
    unhashable_seen = False  # once True, equality may cross into unhashables
    for item in list:
        try:
            if item in seen or (unhashable_seen and item in new_list):
                continue
            seen.add(item)
        except TypeError:
            # Unhashable item: fall back to a linear equality scan.
            unhashable_seen = True
            if item in new_list:
                continue
        new_list.append(item)
    return new_list
|
import lab as B
from wbml.warning import warn_upmodule
from ..matrix import AbstractMatrix
from ..triangular import LowerTriangular, UpperTriangular
__all__ = []
@B.dispatch
def triangular_solve(a: LowerTriangular, b: AbstractMatrix, lower_a: bool = True):
if not lower_a:
warn_upmodule(
f'So... |
import json
import jsonpickle
def encode_indent(object):
    """
    Encode an arbitrary object as a pretty-printed JSON string.

    Parameters
    ----------
    object : object
        Any object jsonpickle can serialize.

    Returns
    -------
    str
        The object's JSON representation, 4-space indented with sorted keys.
    """
    pickled = jsonpickle.encode(object)
    as_plain_json = json.loads(pickled)
    return json.dumps(as_plain_json, indent=4, sort_keys=True)
def save_... |
import os
def create_key(template, outtype=("nii.gz",), annotation_classes=None):
    """Build a heudiconv key tuple for one output series.

    Parameters
    ----------
    template : str
        Non-empty format string naming the output path.
    outtype : tuple of str
        Output file type(s) (default: compressed NIfTI).
    annotation_classes : optional
        Passed through unchanged.

    Returns
    -------
    tuple
        ``(template, outtype, annotation_classes)``

    Raises
    ------
    ValueError
        If *template* is None or empty.
    """
    # `not template` already rejects None, "" and other falsy values; the
    # original `template is None or not template` was redundant.
    if not template:
        raise ValueError("Template must be a valid format string")
    return template, outtype, annotation_classes
def infotodict(seqinfo):
"""Heuristic evaluator for determining which run... |
import pdb
import argparse
import random
import timeit
from typing import Any, Dict, List, Iterator, Optional, Sequence, Set, Tuple
import numpy as np
import pandas as pd
from tqdm import tqdm
from functools import partial
import deepchem as dc
from deepchem.data import Dataset
from deepchem.splits import Splitter
fr... |
import numpy as np
from numpy.random import default_rng
import scipy.sparse
import scipy.sparse.linalg
from melvin import BasisFunctions
def sech(x):
    """Hyperbolic secant of x, i.e. 1 / cosh(x) (elementwise for arrays)."""
    cosh_x = np.cosh(x)
    return 1.0 / cosh_x
def load_scipy_sparse(xp):
if xp.__name__ == "numpy":
return scipy.sparse
elif xp.__name__ == "cupy":
import cupy... |
from __future__ import division, absolute_import, print_function
from opendeep.optimization.loss.loss import *
from opendeep.optimization.loss.binary_crossentropy import *
from opendeep.optimization.loss.categorical_crossentropy import *
from opendeep.optimization.loss.isotropic_gaussian_LL import *
from opendeep.opt... |
from .__version__ import __version__
from .cleanups import add_cleanup, add_critical_cleanup
from .conf import config
from .ctx import context
from .ctx import g, session, test
from .core.session import Session
# assertions
from . import assertions
should = assertions
from .assertions import (
assert_contains,
... |
from api.utils.db import db
from flask import request
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
github_access_token = db.Column(db.String(255))
github_id = db.Column(db.Integer)
github_login = db.Column(db.String(255))
username = db.Column(db.S... |
from json import load
from _pytest import config
import pytest
import os
from textwrap import dedent
from bisect_scanner.config import Config, load_config, configure
import bisect_scanner.config as config
@pytest.fixture
def valid_config_file(tmp_path):
content = dedent("""
[node_urls]
W3_URL=wss://w3_url... |
#!python
# The original mixed a C-preprocessor style "#define VAR foo" with the
# Python 2 statement `print VAR` — neither is valid Python 3 (the script
# would not even parse). The macro substitution is done by hand and the
# print() function is used instead.
if __name__ == '__main__':
    foo = 1
    print(foo)
|
#! /usr/bin/env python3
# image to ascii pixelwise
# Copyright Lesmana Zimmer lesmana@gmx.de
# Licensed under WTFPL version 2
# http://www.wtfpl.net/about/
import sys
import itertools
import pprint
from PIL import Image
try:
filename = sys.argv[1]
except:
print('need argument: filename of image')
sys.exit(1)
... |
import pytest
from wtforms import Form
from tests import MultiDict
from wtforms_alchemy import DataRequired, PhoneNumberField
class TestPhoneNumberField(object):
def setup_method(self, method):
self.valid_phone_numbers = [
'040 1234567',
'+358 401234567',
'09 2501234',... |
#!/usr/bin/env python
import _init_paths
import gym
from tf_rl.controller import DiscreteDeepQ, NL
specname = 'CartPole-v0'
serializedname = 'dqntest_'+specname+'.pbx'
spec = gym.spec(specname)
env = spec.make()
episode_count = 250
max_steps = 10000
action_space = env.action_space
maxaction = action_space.n
observ... |
import environ
ROOT_DIR = environ.Path(__file__) - 3
APPS_DIR = ROOT_DIR.path('app')
env = environ.Env()
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=True)
if READ_DOT_ENV_FILE:
env_file = str(ROOT_DIR.path('.env'))
env.read_env(env_file)
DJANGO_APPS = [
'django.contrib.auth',
'... |
from py_to_win_app import Project
app_name = "fastapi-desktop"

# Describe the example FastAPI project to be packaged.
project = Project(
    input_dir=f"examples/{app_name}",
    main_file="main.py",
    app_name=app_name,
)

# Bundle an embedded Python 3.9.7 plus the app's requirements into an exe,
# then produce the distributable folder (removing the build directory).
project.build(
    python_version="3.9.7",
    requirements_file=f"examples/{app_name}/requirements.txt",
    exe_name=app_name,
)
project.make_dist(delete_build_dir=True)
|
from __future__ import unicode_literals
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.utils.translation import ugettext_lazy as _
from .models import User
class UserCreationForm(UserCreationForm):
... |
# Generated by Django 2.0.5 on 2018-05-26 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0005_auto_20180504_1652'),
]
operations = [
migrations.AlterModelOptions(
name='artist',
options={'verbose_na... |
"Utils functions."
import matplotlib.pyplot as plt
import numpy as np
from functools import wraps
def unpack_first_arg(f):
"Treat the second dimension of the first arg as independent inputs"
@wraps(f)
def g(*args, **kwargs):
if len(args) == 1 and isinstance(args[0], np.ndarray):
retur... |
from gibson.envs.mobile_robots_env import TurtlebotNavigateSpeedControlEnv
from gibson.utils.play import play
import argparse
import os
import pybullet as p
import pybullet_data
import numpy as np
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'configs', 'play', 'tr_position_control.yaml... |
import tensorflow as tf
from tensorflow.contrib.tpu.python.tpu import keras_support
from tensorflow.keras.applications import NASNetLarge
from tensorflow.keras.layers import Input, Conv2D, BatchNormalization, Activation, Add, AveragePooling2D, GlobalAveragePooling2D, Dense
from tensorflow.keras.optimizers import Ad... |
#!/usr/bin/env python
import threading, os, time
from scapy.all import *
################################## USER INPUT VARIABLES #####################################
pcapFilename = 'droneAlert.pcap'
isDebug = True # Set isDebug = False when monitor mode is needed to be setup
interfa... |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not us... |
#!/usr/bin/env python3
"""
Spark API Python example.
This script retrieves an access token then fetches available Spark contracts.
It requires Python 3 (version >=3.1)
Usage:
- By providing the client_credentials file path
$ python spark_api_example.py <client_credentials_csv_file_path>
- By providing 2 environme... |
# coding=utf-8
"""Classes for handling experimental datasets used by mmCIF models.
"""
class Dataset(object):
"""A set of input data, for example, a crystal structure or EM map.
:param location: a pointer to where the
dataset is stored. This is usually a subclass of
:class:`~i... |
from .blame import *
from .subcommand import *
# Registry mapping subcommand name -> Subcommand(validator, handler).
# NOTE(review): `blame` must resolve to the submodule object here; a star
# import alone does not bind the module name unless `.blame` re-exports
# it — verify against the package layout.
SUBCOMMANDS = {"blame": Subcommand(blame.blame_args_validator, blame.blame_handler)}
|
# -*- coding: utf-8 -*-
#==========================================
# Title: optimization.py
# Author: Binxin Ru and Ahsan Alvi
# Date: 20 August 2019
# Link: https://arxiv.org/abs/1906.08878
#==========================================
"""Optimization utilities"""
from typing import Callable, Optional, Dict
impo... |
import re
import string
import random
import jsonlines
# read in 5 lines at a time
# line one is sentence with a mask
# line two is [MASK]
# line three is the options (candidates)
# line four is answer
def main():
with open('pdp-test.txt', 'r') as wsc:
for i in range(564):
encoded_schema = rea... |
# -*- coding: UTF-8 -*-
# @desc while loop
# Count down from 10, printing each value. Ported from Python 2: the
# original `print 'the count is:', count` statements are syntax errors
# under Python 3; output is unchanged.
count = 10
while count > 0:
    print('the count is:', count)
    count = count - 1
print('it is over...')
|
from collections import defaultdict
import sys
import io
import os
from tqdm import tqdm
import math
import torch
from torch import nn
import numpy as np
import thop
import time
from copy import deepcopy
from torchvision import ops
import contextlib
from typing import Dict, List, Tuple
import torch.distributed as dist
... |
import re
from datetime import datetime, timedelta, tzinfo
from dimagi.utils.parsing import ISO_DATE_FORMAT
def map_reduce(emitfunc=lambda rec: [(None,)], reducefunc=lambda v: v, data=None, include_docs=False):
"""perform a "map-reduce" on the data
emitfunc(rec): return an iterable of key-value pairings as (... |
from branch.branch.report.accounts_receivable_branch.accounts_receivable_branch import ReceivablePayableReport
def execute(filters=None):
    """Entry point for the branch accounts-receivable report.

    Runs the shared ReceivablePayableReport against supplier parties,
    naming them per the "supp_master_name" option in Buying Settings.
    """
    report_args = {
        "party_type": "Supplier",
        "naming_by": ["Buying Settings", "supp_master_name"],
    }
    return ReceivablePayableReport(filters).run(report_args)
from collections import namedtuple
# Per-step environment info bundled next to observations: discount factor,
# raw game score, trajectory-done flag, and the emulator's internal state.
EnvInfo = namedtuple(
    'EnvInfo',
    ['discount', 'game_score', 'traj_done', 'internal_state'],
)
|
class Solution:
def diffWaysToCompute(self, input: str) -> [int]:
end = []
op = {'+': lambda x, y: x + y,
'-': lambda x, y: x - y,
'*': lambda x, y: x * y}
for i in range(len(input)):
if input[i] in op.keys():
for left in self.diffWays... |
# This module performs the conversion of T^{mu nu}
# in Spherical or Cartesian coordinates
# given as *numerical* expressions (i.e., given as
# numerical values with fixed floating-point precision;
# e.g., in the case of an initial data solver), to
# rescaled BSSN stress-energy source terms.
# Author: Zachariah B. Eti... |
import time
import pandas as pd
start_time = time.time()
data = pd.read_csv('../input/aqi-first-data-from-april-2018/data-esp8266-1129419-2018-04-20.csv',sep=';',header=0,skiprows=1,index_col=False,names=['date','2','3','4','5','6','7','pm25','pm10','10','11','temp','hum','14','15','16','17','18','19','20','21'],usec... |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from jd_assistant import Assistant
if __name__ == '__main__':
"""
重要提示:此处为示例代码之一,请移步下面的链接查看使用教程👇
https://github.com/tychxn/jd-assistant/wiki/1.-%E4%BA%AC%E4%B8%9C%E6%8A%A2%E8%B4%AD%E5%8A%A9%E6%89%8B%E7%94%A8%E6%B3%95
"""
# 执行预约抢购
# 5个参数
# sku_i... |
from django.urls import path
from petstagram.pets.views import PetsListView, CreatePetView, UpdatePetView, DeletePetView, PetDetailsView, \
LikePetView, CommentPetView # like_pet, details_or_comment_pet, list_pets, create_pet, edit_pet, delete_pet
urlpatterns = [
# path('', list_pets, name='list pets'),
... |
from watchmen.topic.topic import Topic
def create_topic_index(topic: Topic) -> None:
    """Create index structures for *topic*. Stub — currently a no-op."""
    pass
def __create_topic_table(topic: Topic, config=None) -> None:
    """Create the storage table backing *topic* (internal). Stub — currently a no-op."""
    pass
def update_topic_index(topic: Topic) -> None:
    """Refresh index structures after *topic* changes. Stub — currently a no-op."""
    pass
def create_topic_table(topic: Topic) -> None:
    """Public entry point to create *topic*'s table. Stub — currently a no-op."""
    pass
|
#!/usr/bin/env python
from math import fabs
import rospy
import actionlib
from std_msgs.msg import Float32
from chapter15.msg import RotationAction, RotationFeedback, RotationResult
from chapter15.srv import Light, LightResponse
from fake_actuator import FakeActuator
def volume_callback(msg):
volume = min(100,... |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: plugins/shuffle/protobuf/message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.prot... |
# encoding: utf-8
import json
import random
from datetime import timedelta
import pytest
from api.authenticator import BasicAuthenticationProvider
from api.circulation import CirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo
from api.circulation_exceptions import *
from api.config import Configuration, temp_config
... |
# Generated by Django 2.2.6 on 2019-11-18 19:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='course',
name='length',
... |
import warnings
warnings.simplefilter(action="ignore", category=RuntimeWarning)
warnings.simplefilter(action="ignore", category=PendingDeprecationWarning)
import pytest
import os
from tempfile import NamedTemporaryFile, mkdtemp
ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_data/")
from sparse_... |
from nideconv.hierarchical_bayes.backends import HierarchicalStanModel
import numpy as np
from nose.tools import assert_greater
from numpy.testing import assert_allclose
def test_simple_model():
n_subjects = 15
n_cols = 5
length_signal = 10
beta_subject = np.random.randn(n_subjects, n_cols) + np.aran... |
# For each test case: read n, alternate +1 / -1 over n steps (starting
# with +1) and print the running total.
test_cases = int(input())
for _ in range(test_cases):
    n = int(input())
    total = 0
    add_one = True
    for _ in range(n):
        if add_one:
            total += 1
        else:
            total -= 1
        add_one = not add_one
    print(total)
#!/usr/bin/env python
"""
__author__ = "Axelle Apvrille"
__status__ = "Alpha"
__license__ = "MIT License"
"""
country = { 'af' : 0,
'ax':1,
'al':2,
'dz':3,
'as':4,
'ad':5,
'ao':6,
'ai':7,
'aq':8,
'ag':9,
... |
#
# Copyright (C) 2021 Satoru SATOH <satoru.satoh @ gmail.com>
# SPDX-License-Identifier: MIT
#
# pylint: disable=inherit-non-class,too-few-public-methods
"""anyconfig basic data types.
"""
import pathlib
import typing
IOI_PATH_STR: str = 'path'
IOI_PATH_OBJ: str = 'pathlib.Path'
IOI_STREAM: str = 'stream'
IOI_TYPES... |
import numpy as np
# Quick demo of numpy array constructors: zeros, constant fill,
# identity, and a range.
a = np.zeros((2, 3))   # 2x3 array of zeros
print(a)

p = np.full((3, 4), 12)  # 3x4 array filled with 12
print(p)

k = np.eye(3)  # 3x3 identity ((3) and 3 are the same argument)
print(k)

k = np.arange(9)  # rebinds k to [0, 1, ..., 8]
print(k)
import turtle
# Single shared turtle used for all drawing in this script.
bob = turtle.Turtle()
def square(bob):
    """Draw a 100-unit square with turtle *bob*, then print its repr."""
    for i in range(4):
        bob.fd(100)
        bob.lt(90)
    print(bob)
square(bob)
# Keep the window open and process GUI events until it is closed.
turtle.mainloop()
|
"""Contsructor to take a Python dict containing an API Documentation and
create a HydraDoc object for it
"""
import re
import json
from pyld import jsonld
import requests
from hydra_python_core.doc_writer import (HydraDoc, HydraClass, HydraClassProp,
HydraClassOp, HydraStatus, ... |
import json
from signbank.dictionary.models import *
from signbank.settings.server_specific import *
# these are the categories that are displayed in chartjs in the GlossFrequencyView template
SEX_CATEGORIES = ['Female', 'Male']
AGE_CATEGORIES = ['< 25', '25 - 35', '36 - 65', '> 65']
def collect_speaker_age_data(sp... |
# Generated by Django 3.1.13 on 2021-10-26 13:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0087_auto_20211026_0131'),
]
operations = [
migrations.RemoveField(
model_name='descrierepage',
name='bolta_peste_pr... |
"""INSTEON Standard Receive Message Type 0x50."""
from insteonplm.constants import (MESSAGE_STANDARD_MESSAGE_RECEIVED_0X50,
MESSAGE_STANDARD_MESSAGE_RECIEVED_SIZE)
from insteonplm.address import Address
from insteonplm.messages.message import Message
from insteonplm.messages.messageFl... |
import unittest
import tempfile
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
from vcfremapper.sort_vcf import sort_vcf
class TestSortVcf(unittest.TestCase):
''' test sort_vcf function
'''
def test_sort_vcf(self):
''' check sort_vcf function
'''
... |
"""
Given a binary tree, return all root-to-leaf paths.
Note: A leaf is a node with no children.
Example:
Input:
1
/ \
2 3
\
5
Output: ["1->2->5", "1->3"]
Explanation: All root-to-leaf paths are: 1->2->5, 1->3
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(sel... |
import csv
import numpy as np
import matplotlib.pyplot as plt
fig, ax, = plt.subplots()
y = np.zeros(19)
x = np.zeros(19)
index = 0
N = 19
ind = np.arange(N)
width = 0.5
msg = ax.annotate('Click bars for annotation', xy=(0, 0), xytext=(2012, 20000))
category = []
categoryFoodName = []
with open("food_imports.csv", '... |
import click
import six
from corgie import scheduling, residuals, helpers, stack
from corgie.log import logger as corgie_logger
from corgie.layers import get_layer_types, DEFAULT_LAYER_TYPE, \
str_to_layer_type
from corgie.boundingcube import get_bcube_from_coords
from corgie.argparsers i... |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to... |
from django.contrib.auth.models import User
from allauth.socialaccount.helpers import complete_social_login
from allauth.socialaccount.helpers import render_authentication_error
from allauth.socialaccount import requests
from allauth.socialaccount.models import SocialAccount, SocialLogin
from provider import PersonaP... |
"""A collection of functions to parse STEM input and output files"""
from __future__ import division
import os.path
from glob import glob
import re
import numpy as np
import pandas as pd
import datetime
from netCDF4 import Dataset
# --------------------------------------------------
# helper classes, functions
clas... |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
# @param {ListNode} l1
# @param {ListNode} l2
# @return {ListNode}
def mergeTwoLists(self, l1, l2):
dummy = ListNode(None)
temp1 = l1
... |
# -*- coding: utf-8 -*-
import datetime
from gurobipy import *
from itertools import product
def maxpooling2d(model, layer, inputs):
    """Gurobi encoding hook for a 2-D max-pooling layer.

    Currently a pass-through: returns *inputs* unchanged and adds no
    constraints or variables to *model*. NOTE(review): looks like a
    stub/TODO — confirm whether max-pooling is intentionally treated as
    the identity here.
    """
    return inputs
|
import numpy as np
from scipy import ndimage
_categories = (-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1,
1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, ... |
import torch.nn as nn
import torch
import numpy as np
import torch.nn.functional as F
def calculate_pad_same(image_size, kernel_size, stride):
"""
Calculates the padding to get the "same" size as in Tensorflow
Only works for images were filter covers the complete image in the convolution
"""
print... |
import os
import subprocess
import pytest
from tests import config as conf
from tests import experiment as exp
@pytest.mark.e2e_cpu
def test_support_bundle() -> None:
exp_id = exp.run_basic_test(
config_file=conf.fixtures_path("no_op/single-one-short-step.yaml"),
model_def_file=conf.fixtures_pat... |
# Generated by Django 2.2.11 on 2020-04-03 02:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("core", "0011_auto_20191104_0104")]
operations = [
migrations.AddField(
model_name="reportentry",
name="report_type",
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.