| column        | dtype         | stats         |
|---------------|---------------|---------------|
| commit        | stringlengths | 40 to 40      |
| old_file      | stringlengths | 4 to 234      |
| new_file      | stringlengths | 4 to 234      |
| old_contents  | stringlengths | 10 to 3.01k   |
| new_contents  | stringlengths | 19 to 3.38k   |
| subject       | stringlengths | 16 to 736     |
| message       | stringlengths | 17 to 2.63k   |
| lang          | stringclasses | 4 values      |
| license       | stringclasses | 13 values     |
| repos         | stringlengths | 5 to 82.6k    |
| config        | stringclasses | 4 values      |
| content       | stringlengths | 134 to 4.41k  |
| fuzzy_diff    | stringlengths | 29 to 3.44k   |
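Every row in the dump below carries the fields listed in this schema. As a rough illustration of how such a split could be consumed, here is a minimal sketch using the Hugging Face `datasets` library; the dataset ID and split name are placeholders (this dump does not name the repository), while the field names are the ones from the schema above.

```python
# Minimal sketch: load a dataset with this schema and inspect a few rows.
# "your-org/your-code-edit-dataset" is a hypothetical Hub ID; substitute
# the real repository and split before running.
from datasets import load_dataset

ds = load_dataset("your-org/your-code-edit-dataset", split="train")

for row in ds.select(range(3)):
    # Each row pairs one file's contents before and after a single commit,
    # together with the commit subject/message and a fuzzy diff.
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])
    print(row["fuzzy_diff"][:200])
```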
2534e20b372e5916a977d9ebf28172e393127812
src/org/jcodegen/java/FunctionArgument.java
src/org/jcodegen/java/FunctionArgument.java
package org.jcodegen.java; /** * ... */ public class FunctionArgument { private final String name; private final String type; private final boolean isFinal; public FunctionArgument(final String type, final String name) { this(type, name, true); } public FunctionArgument(final String type, final String name, final boolean isFinal) { this.type = type; this.name = name; this.isFinal = isFinal; } @Override public String toString() { final StringBuilder buf = new StringBuilder(); if (isFinal) buf.append("final "); buf.append(type); buf.append(" "); buf.append(name); return buf.toString(); } }
package org.jcodegen.java; /** * ... */ public class FunctionArgument { private final String name; private final String type; private final boolean isFinal; public FunctionArgument(final String type, final String name) { this(type, name, true); } public FunctionArgument(final String type, final String name, final boolean isFinal) { this.type = type; this.name = name; this.isFinal = isFinal; } @Override public String toString() { final StringBuilder buf = new StringBuilder(); if (isFinal) buf.append("final "); buf.append(type); buf.append(" "); buf.append(name); return buf.toString(); } public String getName() { return name; } public String getType() { return type; } public boolean isFinal() { return isFinal; } }
Add getters for constructor parameters
Add getters for constructor parameters
Java
apache-2.0
cpythoud/java-codegen
java
## Code Before: package org.jcodegen.java; /** * ... */ public class FunctionArgument { private final String name; private final String type; private final boolean isFinal; public FunctionArgument(final String type, final String name) { this(type, name, true); } public FunctionArgument(final String type, final String name, final boolean isFinal) { this.type = type; this.name = name; this.isFinal = isFinal; } @Override public String toString() { final StringBuilder buf = new StringBuilder(); if (isFinal) buf.append("final "); buf.append(type); buf.append(" "); buf.append(name); return buf.toString(); } } ## Instruction: Add getters for constructor parameters ## Code After: package org.jcodegen.java; /** * ... */ public class FunctionArgument { private final String name; private final String type; private final boolean isFinal; public FunctionArgument(final String type, final String name) { this(type, name, true); } public FunctionArgument(final String type, final String name, final boolean isFinal) { this.type = type; this.name = name; this.isFinal = isFinal; } @Override public String toString() { final StringBuilder buf = new StringBuilder(); if (isFinal) buf.append("final "); buf.append(type); buf.append(" "); buf.append(name); return buf.toString(); } public String getName() { return name; } public String getType() { return type; } public boolean isFinal() { return isFinal; } }
// ... existing code ... return buf.toString(); } public String getName() { return name; } public String getType() { return type; } public boolean isFinal() { return isFinal; } } // ... rest of the code ...
05ddf0fff9469ae0173809eb559486ff216231a0
test/test_scripts.py
test/test_scripts.py
import pytest import subprocess @pytest.mark.parametrize("script", []) def test_script(script): try: subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: print e.output assert e.returncode == 0
import pytest import subprocess @pytest.mark.parametrize("script", ['bin/cast-example']) def test_script(script): try: subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: print e.output assert e.returncode == 0
Add example-cast to sanity test
Add example-cast to sanity test
Python
mit
maxzheng/clicast
python
## Code Before: import pytest import subprocess @pytest.mark.parametrize("script", []) def test_script(script): try: subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: print e.output assert e.returncode == 0 ## Instruction: Add example-cast to sanity test ## Code After: import pytest import subprocess @pytest.mark.parametrize("script", ['bin/cast-example']) def test_script(script): try: subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: print e.output assert e.returncode == 0
... import subprocess @pytest.mark.parametrize("script", ['bin/cast-example']) def test_script(script): try: ...
73da02cc3cc33f287a78f9d7d0c7904058ec8c01
setup.py
setup.py
from setuptools import setup setup( name='nchp', version='0.1', description='NGINX based Configurable HTTP Proxy for use with jupyterhub', url='http://github.com/yuvipanda/jupyterhub-nginx-chp', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=['nchp'], entry_points={ 'console_scripts': [ 'nchp = nchp.__main__:main' ] } )
from setuptools import setup setup( name='nchp', version='0.1', description='NGINX based Configurable HTTP Proxy for use with jupyterhub', url='http://github.com/yuvipanda/jupyterhub-nginx-chp', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=['nchp'], include_package_data=True, entry_points={ 'console_scripts': [ 'nchp = nchp.__main__:main' ] } )
Make sure that pip will install the template files too
Make sure that pip will install the template files too
Python
bsd-3-clause
yuvipanda/jupyterhub-nginx-chp
python
## Code Before: from setuptools import setup setup( name='nchp', version='0.1', description='NGINX based Configurable HTTP Proxy for use with jupyterhub', url='http://github.com/yuvipanda/jupyterhub-nginx-chp', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=['nchp'], entry_points={ 'console_scripts': [ 'nchp = nchp.__main__:main' ] } ) ## Instruction: Make sure that pip will install the template files too ## Code After: from setuptools import setup setup( name='nchp', version='0.1', description='NGINX based Configurable HTTP Proxy for use with jupyterhub', url='http://github.com/yuvipanda/jupyterhub-nginx-chp', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=['nchp'], include_package_data=True, entry_points={ 'console_scripts': [ 'nchp = nchp.__main__:main' ] } )
// ... existing code ... author_email='[email protected]', license='BSD', packages=['nchp'], include_package_data=True, entry_points={ 'console_scripts': [ 'nchp = nchp.__main__:main' // ... rest of the code ...
df57dacf8f5ec7f697247fed39ce86d3cde45615
tests/tests_plotting/test_misc.py
tests/tests_plotting/test_misc.py
import pytest from matplotlib import pyplot as plt from poliastro.plotting import OrbitPlotter2D, OrbitPlotter3D from poliastro.plotting.misc import plot_solar_system @pytest.mark.parametrize("outer,expected", [(True, 8), (False, 4)]) def test_plot_solar_system_has_expected_number_of_orbits(outer, expected): assert len(plot_solar_system(outer).trajectories) == expected @pytest.mark.parametrize( "use_3d, plotter_class", [(True, OrbitPlotter3D), (False, OrbitPlotter2D)] ) def test_plot_solar_system_uses_expected_orbitplotter(use_3d, plotter_class): assert isinstance(plot_solar_system(use_3d=use_3d, interactive=True), plotter_class) @pytest.mark.mpl_image_compare def test_plot_inner_solar_system_static(earth_perihelion): plot_solar_system(outer=False, epoch=earth_perihelion) return plt.gcf() @pytest.mark.mpl_image_compare def test_plot_outer_solar_system_static(earth_perihelion): plot_solar_system(outer=True, epoch=earth_perihelion) return plt.gcf()
import pytest from matplotlib import pyplot as plt from poliastro.plotting import OrbitPlotter2D, OrbitPlotter3D from poliastro.plotting.misc import plot_solar_system @pytest.mark.parametrize("outer,expected", [(True, 8), (False, 4)]) def test_plot_solar_system_has_expected_number_of_orbits(outer, expected): assert len(plot_solar_system(outer).trajectories) == expected @pytest.mark.parametrize( "use_3d, plotter_class", [(True, OrbitPlotter3D), (False, OrbitPlotter2D)] ) def test_plot_solar_system_uses_expected_orbitplotter(use_3d, plotter_class): assert isinstance(plot_solar_system(use_3d=use_3d, interactive=True), plotter_class) if use_3d: with pytest.raises(ValueError) as excinfo: plot_solar_system(use_3d=use_3d) assert ("The static plotter does not support 3D" in excinfo.exconly()) @pytest.mark.mpl_image_compare def test_plot_inner_solar_system_static(earth_perihelion): plot_solar_system(outer=False, epoch=earth_perihelion) return plt.gcf() @pytest.mark.mpl_image_compare def test_plot_outer_solar_system_static(earth_perihelion): plot_solar_system(outer=True, epoch=earth_perihelion) return plt.gcf()
Check for error if use_3D and non-interactive
Check for error if use_3D and non-interactive
Python
mit
poliastro/poliastro
python
## Code Before: import pytest from matplotlib import pyplot as plt from poliastro.plotting import OrbitPlotter2D, OrbitPlotter3D from poliastro.plotting.misc import plot_solar_system @pytest.mark.parametrize("outer,expected", [(True, 8), (False, 4)]) def test_plot_solar_system_has_expected_number_of_orbits(outer, expected): assert len(plot_solar_system(outer).trajectories) == expected @pytest.mark.parametrize( "use_3d, plotter_class", [(True, OrbitPlotter3D), (False, OrbitPlotter2D)] ) def test_plot_solar_system_uses_expected_orbitplotter(use_3d, plotter_class): assert isinstance(plot_solar_system(use_3d=use_3d, interactive=True), plotter_class) @pytest.mark.mpl_image_compare def test_plot_inner_solar_system_static(earth_perihelion): plot_solar_system(outer=False, epoch=earth_perihelion) return plt.gcf() @pytest.mark.mpl_image_compare def test_plot_outer_solar_system_static(earth_perihelion): plot_solar_system(outer=True, epoch=earth_perihelion) return plt.gcf() ## Instruction: Check for error if use_3D and non-interactive ## Code After: import pytest from matplotlib import pyplot as plt from poliastro.plotting import OrbitPlotter2D, OrbitPlotter3D from poliastro.plotting.misc import plot_solar_system @pytest.mark.parametrize("outer,expected", [(True, 8), (False, 4)]) def test_plot_solar_system_has_expected_number_of_orbits(outer, expected): assert len(plot_solar_system(outer).trajectories) == expected @pytest.mark.parametrize( "use_3d, plotter_class", [(True, OrbitPlotter3D), (False, OrbitPlotter2D)] ) def test_plot_solar_system_uses_expected_orbitplotter(use_3d, plotter_class): assert isinstance(plot_solar_system(use_3d=use_3d, interactive=True), plotter_class) if use_3d: with pytest.raises(ValueError) as excinfo: plot_solar_system(use_3d=use_3d) assert ("The static plotter does not support 3D" in excinfo.exconly()) @pytest.mark.mpl_image_compare def test_plot_inner_solar_system_static(earth_perihelion): plot_solar_system(outer=False, epoch=earth_perihelion) return plt.gcf() @pytest.mark.mpl_image_compare def test_plot_outer_solar_system_static(earth_perihelion): plot_solar_system(outer=True, epoch=earth_perihelion) return plt.gcf()
# ... existing code ... def test_plot_solar_system_uses_expected_orbitplotter(use_3d, plotter_class): assert isinstance(plot_solar_system(use_3d=use_3d, interactive=True), plotter_class) if use_3d: with pytest.raises(ValueError) as excinfo: plot_solar_system(use_3d=use_3d) assert ("The static plotter does not support 3D" in excinfo.exconly()) @pytest.mark.mpl_image_compare def test_plot_inner_solar_system_static(earth_perihelion): # ... rest of the code ...
8c2996b94cdc3210b24ebeaeb957c625629f68a5
hunting/level/encoder.py
hunting/level/encoder.py
import json import hunting.sim.entities as entities class GameObjectEncoder(json.JSONEncoder): def default(self, o): d = o.__dict__ d.pop('owner', None) if isinstance(o, entities.GameObject): d.pop('log', None) d.pop('ai', None) return d elif isinstance(o, entities.Fighter): d.pop('death_function') return d elif isinstance(o, entities.ChangeableProperty): return {k: o.__dict__[k] for k in ['property_type', 'base']} else: return d def encode_level(level): save_factions = [f for f in level.get_factions() if level.get_faction_info(f)['save'] is True] factions_to_objects = {f: level.get_objects_inside_faction(f) for f in save_factions} return json.dumps(factions_to_objects, cls=GameObjectEncoder, indent=2)
import json import hunting.sim.entities as entities class GameObjectEncoder(json.JSONEncoder): def default(self, o): d = o.__dict__ d.pop('owner', None) if isinstance(o, entities.GameObject): d.pop('log', None) d.pop('ai', None) return d elif isinstance(o, entities.Fighter): d.pop('death_function') return d elif isinstance(o, entities.ChangeableProperty): return {k: o.__dict__[k] for k in ['property_type', 'base']} else: return d def encode_level(level): save_factions = {f: level.get_faction_info(f) for f in level.get_factions() if level.get_faction_info(f)['save'] is True} for f in save_factions: save_factions[f]['objects'] = level.get_objects_inside_faction(f) output = {'log': level.log.events, 'factions': save_factions} return json.dumps(output, cls=GameObjectEncoder, indent=2)
Add log to encoding output (still fails due to objects)
Add log to encoding output (still fails due to objects)
Python
mit
MoyTW/RL_Arena_Experiment
python
## Code Before: import json import hunting.sim.entities as entities class GameObjectEncoder(json.JSONEncoder): def default(self, o): d = o.__dict__ d.pop('owner', None) if isinstance(o, entities.GameObject): d.pop('log', None) d.pop('ai', None) return d elif isinstance(o, entities.Fighter): d.pop('death_function') return d elif isinstance(o, entities.ChangeableProperty): return {k: o.__dict__[k] for k in ['property_type', 'base']} else: return d def encode_level(level): save_factions = [f for f in level.get_factions() if level.get_faction_info(f)['save'] is True] factions_to_objects = {f: level.get_objects_inside_faction(f) for f in save_factions} return json.dumps(factions_to_objects, cls=GameObjectEncoder, indent=2) ## Instruction: Add log to encoding output (still fails due to objects) ## Code After: import json import hunting.sim.entities as entities class GameObjectEncoder(json.JSONEncoder): def default(self, o): d = o.__dict__ d.pop('owner', None) if isinstance(o, entities.GameObject): d.pop('log', None) d.pop('ai', None) return d elif isinstance(o, entities.Fighter): d.pop('death_function') return d elif isinstance(o, entities.ChangeableProperty): return {k: o.__dict__[k] for k in ['property_type', 'base']} else: return d def encode_level(level): save_factions = {f: level.get_faction_info(f) for f in level.get_factions() if level.get_faction_info(f)['save'] is True} for f in save_factions: save_factions[f]['objects'] = level.get_objects_inside_faction(f) output = {'log': level.log.events, 'factions': save_factions} return json.dumps(output, cls=GameObjectEncoder, indent=2)
... def encode_level(level): save_factions = {f: level.get_faction_info(f) for f in level.get_factions() if level.get_faction_info(f)['save'] is True} for f in save_factions: save_factions[f]['objects'] = level.get_objects_inside_faction(f) output = {'log': level.log.events, 'factions': save_factions} return json.dumps(output, cls=GameObjectEncoder, indent=2) ...
5e5a6a55d43bf66c7f71d054b92a66528bf2a571
driver/driver.py
driver/driver.py
from abc import ABCMeta, abstractmethod class Driver(metaclass=ABCMeta): @abstractmethod def create(self): pass @abstractmethod def resize(self, id, quota): pass @abstractmethod def clone(self, id): pass @abstractmethod def remove(self, id): pass @abstractmethod def expose(self, id): pass
from abc import ABCMeta, abstractmethod class Driver(metaclass=ABCMeta): @abstractmethod def create(self, requirements): pass @abstractmethod def _set_quota(self, id, quota): pass @abstractmethod def resize(self, id, quota): pass @abstractmethod def clone(self, id): pass @abstractmethod def remove(self, id): pass @abstractmethod def expose(self, id, host, permissions): pass
Fix inconsistency in parameters with base class
Fix inconsistency in parameters with base class
Python
apache-2.0
PressLabs/cobalt,PressLabs/cobalt
python
## Code Before: from abc import ABCMeta, abstractmethod class Driver(metaclass=ABCMeta): @abstractmethod def create(self): pass @abstractmethod def resize(self, id, quota): pass @abstractmethod def clone(self, id): pass @abstractmethod def remove(self, id): pass @abstractmethod def expose(self, id): pass ## Instruction: Fix inconsistency in parameters with base class ## Code After: from abc import ABCMeta, abstractmethod class Driver(metaclass=ABCMeta): @abstractmethod def create(self, requirements): pass @abstractmethod def _set_quota(self, id, quota): pass @abstractmethod def resize(self, id, quota): pass @abstractmethod def clone(self, id): pass @abstractmethod def remove(self, id): pass @abstractmethod def expose(self, id, host, permissions): pass
# ... existing code ... class Driver(metaclass=ABCMeta): @abstractmethod def create(self, requirements): pass @abstractmethod def _set_quota(self, id, quota): pass @abstractmethod # ... modified code ... pass @abstractmethod def expose(self, id, host, permissions): pass # ... rest of the code ...
c5c810165dcfc37b6d413133b49cd2a3e58449f6
openrest-generator-commons/src/main/java/pl/openrest/generator/commons/type/TypeFileWriter.java
openrest-generator-commons/src/main/java/pl/openrest/generator/commons/type/TypeFileWriter.java
package pl.openrest.generator.commons.type; import java.io.File; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.TypeSpec; public class TypeFileWriter { private final File outputDirectory; private static final Logger LOGGER = LoggerFactory.getLogger(TypeFileWriter.class); public TypeFileWriter(File outputDirectory) { this.outputDirectory = outputDirectory; } public void write(TypeSpec typeSpec, String packageName) { JavaFile file = JavaFile.builder(packageName, typeSpec).build(); try { file.writeTo(outputDirectory); } catch (IOException e) { LOGGER.error(String.format("Error while writing %s", typeSpec.name), e); } } }
package pl.openrest.generator.commons.type; import java.io.File; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.TypeSpec; public class TypeFileWriter { private final File outputDirectory; private static final Logger LOGGER = LoggerFactory.getLogger(TypeFileWriter.class); public TypeFileWriter(File outputDirectory) { this.outputDirectory = outputDirectory; } public void write(TypeSpec typeSpec, String packageName) { JavaFile file = JavaFile.builder(packageName, typeSpec).skipJavaLangImports(true).build(); try { file.writeTo(outputDirectory); } catch (IOException e) { LOGGER.error(String.format("Error while writing %s", typeSpec.name), e); } } }
Set skipJavaLangImports flag to true
Set skipJavaLangImports flag to true
Java
apache-2.0
konik32/openrest
java
## Code Before: package pl.openrest.generator.commons.type; import java.io.File; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.TypeSpec; public class TypeFileWriter { private final File outputDirectory; private static final Logger LOGGER = LoggerFactory.getLogger(TypeFileWriter.class); public TypeFileWriter(File outputDirectory) { this.outputDirectory = outputDirectory; } public void write(TypeSpec typeSpec, String packageName) { JavaFile file = JavaFile.builder(packageName, typeSpec).build(); try { file.writeTo(outputDirectory); } catch (IOException e) { LOGGER.error(String.format("Error while writing %s", typeSpec.name), e); } } } ## Instruction: Set skipJavaLangImports flag to true ## Code After: package pl.openrest.generator.commons.type; import java.io.File; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.TypeSpec; public class TypeFileWriter { private final File outputDirectory; private static final Logger LOGGER = LoggerFactory.getLogger(TypeFileWriter.class); public TypeFileWriter(File outputDirectory) { this.outputDirectory = outputDirectory; } public void write(TypeSpec typeSpec, String packageName) { JavaFile file = JavaFile.builder(packageName, typeSpec).skipJavaLangImports(true).build(); try { file.writeTo(outputDirectory); } catch (IOException e) { LOGGER.error(String.format("Error while writing %s", typeSpec.name), e); } } }
# ... existing code ... } public void write(TypeSpec typeSpec, String packageName) { JavaFile file = JavaFile.builder(packageName, typeSpec).skipJavaLangImports(true).build(); try { file.writeTo(outputDirectory); } catch (IOException e) { # ... rest of the code ...
6aa3ed3634a22b89f7128883d20f28f65ed00152
basic.py
basic.py
import os # needed for opening/compiling file import time # needed for delay def getPath(allowCancel = True): """Ask the user for lilypond file path and return it as string. Takes one boolean argument as to whether message should say cancelling is allowed or not. Defaults to true, however this may not be suitable for where the path is needed for initialisation.""" if allowCancel == True: question = "Enter path of lilypond file (including file but without extension), or enter nothing to cancel: " else: question = "Enter path of lilypond file (including file but without extension): " path = raw_input(question) return path logwait = 5 # how long the program waits before opening the log answer = "" path = "" while path == "": path = getPath(False) while answer.lower() != "e": answer = raw_input("Enter Y or C to compile, E to exit, or P to change file path: ") if answer.lower() == "y" or answer.lower() == "c": os.startfile(path + ".ly") print "Opening log file in " + str(logwait) + " seconds..." time.sleep(logwait) print "Log file: ==========================" logfile = open(path + ".log", "r") print logfile.read() print "End of log file: ===================" print "====================================" elif answer.lower() == "p": path = getPath()
import os; # needed for opening/compiling file import time; # needed for delay def getPath(allowCancel = True): """Ask the user for lilypond file path and return it as string. Takes one boolean argument as to whether message should say cancelling is allowed or not. Defaults to true, however this may not be suitable for where the path is needed for initialisation.""" if allowCancel == True: question = "Enter path of lilypond file (including file but without extension), or enter nothing to cancel: "; else: question = "Enter path of lilypond file (including file but without extension): "; path = raw_input(question); return path; logwait = 5; # how long the program waits before opening the log answer = ""; path = ""; while path == "": path = getPath(False); while answer.lower() != "e": answer = raw_input("Enter Y or C to compile, E to exit, or P to change file path: "); if answer.lower() == "y" or answer.lower() == "c": os.startfile(path + ".ly"); print "Opening log file in " + str(logwait) + " seconds..."; time.sleep(logwait); print "Log file: =========================="; logfile = open(path + ".log", "r"); print logfile.read(); print "End of log file: ==================="; print "===================================="; elif answer.lower() == "p": path = getPath();
Add semicolons to end of program statements
Add semicolons to end of program statements
Python
unlicense
RainCity471/lyCompiler
python
## Code Before: import os # needed for opening/compiling file import time # needed for delay def getPath(allowCancel = True): """Ask the user for lilypond file path and return it as string. Takes one boolean argument as to whether message should say cancelling is allowed or not. Defaults to true, however this may not be suitable for where the path is needed for initialisation.""" if allowCancel == True: question = "Enter path of lilypond file (including file but without extension), or enter nothing to cancel: " else: question = "Enter path of lilypond file (including file but without extension): " path = raw_input(question) return path logwait = 5 # how long the program waits before opening the log answer = "" path = "" while path == "": path = getPath(False) while answer.lower() != "e": answer = raw_input("Enter Y or C to compile, E to exit, or P to change file path: ") if answer.lower() == "y" or answer.lower() == "c": os.startfile(path + ".ly") print "Opening log file in " + str(logwait) + " seconds..." time.sleep(logwait) print "Log file: ==========================" logfile = open(path + ".log", "r") print logfile.read() print "End of log file: ===================" print "====================================" elif answer.lower() == "p": path = getPath() ## Instruction: Add semicolons to end of program statements ## Code After: import os; # needed for opening/compiling file import time; # needed for delay def getPath(allowCancel = True): """Ask the user for lilypond file path and return it as string. Takes one boolean argument as to whether message should say cancelling is allowed or not. Defaults to true, however this may not be suitable for where the path is needed for initialisation.""" if allowCancel == True: question = "Enter path of lilypond file (including file but without extension), or enter nothing to cancel: "; else: question = "Enter path of lilypond file (including file but without extension): "; path = raw_input(question); return path; logwait = 5; # how long the program waits before opening the log answer = ""; path = ""; while path == "": path = getPath(False); while answer.lower() != "e": answer = raw_input("Enter Y or C to compile, E to exit, or P to change file path: "); if answer.lower() == "y" or answer.lower() == "c": os.startfile(path + ".ly"); print "Opening log file in " + str(logwait) + " seconds..."; time.sleep(logwait); print "Log file: =========================="; logfile = open(path + ".log", "r"); print logfile.read(); print "End of log file: ==================="; print "===================================="; elif answer.lower() == "p": path = getPath();
# ... existing code ... import os; # needed for opening/compiling file import time; # needed for delay def getPath(allowCancel = True): """Ask the user for lilypond file path and return it as string. Takes one boolean argument as to whether message should say cancelling is allowed or not. Defaults to true, however this may not be suitable for where the path is needed for initialisation.""" if allowCancel == True: question = "Enter path of lilypond file (including file but without extension), or enter nothing to cancel: "; else: question = "Enter path of lilypond file (including file but without extension): "; path = raw_input(question); return path; logwait = 5; # how long the program waits before opening the log answer = ""; path = ""; while path == "": path = getPath(False); while answer.lower() != "e": answer = raw_input("Enter Y or C to compile, E to exit, or P to change file path: "); if answer.lower() == "y" or answer.lower() == "c": os.startfile(path + ".ly"); print "Opening log file in " + str(logwait) + " seconds..."; time.sleep(logwait); print "Log file: =========================="; logfile = open(path + ".log", "r"); print logfile.read(); print "End of log file: ==================="; print "===================================="; elif answer.lower() == "p": path = getPath(); # ... rest of the code ...
0cc2a1c4d3dc063c06f9775a6cffef74ad3c631c
src/java/ie/omk/smpp/util/ASCIIEncoding.java
src/java/ie/omk/smpp/util/ASCIIEncoding.java
package ie.omk.smpp.util; import java.io.UnsupportedEncodingException; /** * Encoding class representing the ASCII (IA5) alphabet encoding. */ public class ASCIIEncoding extends ie.omk.smpp.util.AlphabetEncoding { private static final int DCS = 1; private static final ASCIIEncoding INSTANCE = new ASCIIEncoding(); /** * Construct a new ASCIIEncoding. */ public ASCIIEncoding() { super(DCS); try { setCharset("US-ASCII"); } catch (UnsupportedEncodingException x) { // All JVMs are required to support ASCII.. } } /** * Get the singleton instance of ASCIIEncoding. * @deprecated */ public static ASCIIEncoding getInstance() { return INSTANCE; } }
package ie.omk.smpp.util; import java.io.UnsupportedEncodingException; /** * Encoding class representing the ASCII (IA5) alphabet encoding. */ public class ASCIIEncoding extends AlphabetEncoding { private static final int DCS = 1; /** * Construct a new ASCIIEncoding. */ public ASCIIEncoding() { super(DCS); try { setCharset("US-ASCII"); } catch (UnsupportedEncodingException x) { // All JVMs are required to support ASCII.. throw new RuntimeException(); } } }
Throw a RuntimeException...just in case!
Throw a RuntimeException...just in case!
Java
bsd-3-clause
oranoceallaigh/smppapi,oranoceallaigh/smppapi,oranoceallaigh/smppapi
java
## Code Before: package ie.omk.smpp.util; import java.io.UnsupportedEncodingException; /** * Encoding class representing the ASCII (IA5) alphabet encoding. */ public class ASCIIEncoding extends ie.omk.smpp.util.AlphabetEncoding { private static final int DCS = 1; private static final ASCIIEncoding INSTANCE = new ASCIIEncoding(); /** * Construct a new ASCIIEncoding. */ public ASCIIEncoding() { super(DCS); try { setCharset("US-ASCII"); } catch (UnsupportedEncodingException x) { // All JVMs are required to support ASCII.. } } /** * Get the singleton instance of ASCIIEncoding. * @deprecated */ public static ASCIIEncoding getInstance() { return INSTANCE; } } ## Instruction: Throw a RuntimeException...just in case! ## Code After: package ie.omk.smpp.util; import java.io.UnsupportedEncodingException; /** * Encoding class representing the ASCII (IA5) alphabet encoding. */ public class ASCIIEncoding extends AlphabetEncoding { private static final int DCS = 1; /** * Construct a new ASCIIEncoding. */ public ASCIIEncoding() { super(DCS); try { setCharset("US-ASCII"); } catch (UnsupportedEncodingException x) { // All JVMs are required to support ASCII.. throw new RuntimeException(); } } }
# ... existing code ... /** * Encoding class representing the ASCII (IA5) alphabet encoding. */ public class ASCIIEncoding extends AlphabetEncoding { private static final int DCS = 1; /** * Construct a new ASCIIEncoding. # ... modified code ... setCharset("US-ASCII"); } catch (UnsupportedEncodingException x) { // All JVMs are required to support ASCII.. throw new RuntimeException(); } } } # ... rest of the code ...
052b3fc55ae2ee20e2ab6d9e29fde8a4fee7a68b
src/pomodoro.h
src/pomodoro.h
// ---------------------------------------------------------------------------- // pomodoro - Defines a model to track progress through the Pomodoro Technique // Copyright (c) 2013 Jonathan Speicher ([email protected]) // Licensed under the MIT license: http://opensource.org/licenses/MIT // ---------------------------------------------------------------------------- #pragma once #include <stdbool.h> // Defines a type to hold the various pomodoro technique segment types. typedef enum { POMODORO_SEGMENT_TYPE_POMODORO = 0, POMODORO_SEGMENT_TYPE_BREAK, POMODORO_SEGMENT_TYPE_COUNT } SegmentType; // Defines a type to hold a pomodoro technique segment. typedef struct { Interval interval; bool restart_on_abort; } Segment; // Defines a structure type to hold the pomodoro technique segment sequence. typedef struct { Segment* this_segment; Segment segments[POMODORO_SEGMENT_TYPE_COUNT]; } Pomodoro; // Initializes the pomodoro technique structure. void pomodoro_init(Pomodoro* pomodoro); // Completes the current pomodoro technique segment and causes an advance to // the next appropriate segment in the pomodoro technique sequence. void pomodoro_complete_segment(Pomodoro* pomodoro); // Aborts the current pomodoro tecnique segment and causes an advance to the // next appropriate segment in the pomodoro technique sequence. void pomodoro_abort_segment(Pomodoro* pomodoro);
// ---------------------------------------------------------------------------- // pomodoro - Defines a model to track progress through the Pomodoro Technique // Copyright (c) 2013 Jonathan Speicher ([email protected]) // Licensed under the MIT license: http://opensource.org/licenses/MIT // ---------------------------------------------------------------------------- #pragma once #include <stdbool.h> // Defines a type to hold the various pomodoro technique segment types. typedef enum { POMODORO_SEGMENT_TYPE_POMODORO = 0, POMODORO_SEGMENT_TYPE_BREAK, POMODORO_SEGMENT_TYPE_COUNT } PomodoroSegmentType; // Defines a type to hold a pomodoro technique segment. typedef struct { Interval interval; bool restart_on_abort; } PomodoroSegment; // Defines a type to hold the pomodoro technique segment sequence. typedef struct { PomodoroSegment* this_segment; PomodoroSegment segments[POMODORO_SEGMENT_TYPE_COUNT]; } Pomodoro; // Initializes the pomodoro technique sequence structure. void pomodoro_init(Pomodoro* pomodoro); // Completes the current pomodoro technique segment and causes an advance to // the next appropriate segment in the pomodoro technique sequence. void pomodoro_complete_segment(Pomodoro* pomodoro); // Aborts the current pomodoro tecnique segment and causes an advance to the // next appropriate segment in the pomodoro technique sequence. void pomodoro_abort_segment(Pomodoro* pomodoro);
Fix some comments and type names
Fix some comments and type names
C
mit
jonspeicher/Pomade,jonspeicher/Pomade,elliots/simple-demo-pebble
c
## Code Before: // ---------------------------------------------------------------------------- // pomodoro - Defines a model to track progress through the Pomodoro Technique // Copyright (c) 2013 Jonathan Speicher ([email protected]) // Licensed under the MIT license: http://opensource.org/licenses/MIT // ---------------------------------------------------------------------------- #pragma once #include <stdbool.h> // Defines a type to hold the various pomodoro technique segment types. typedef enum { POMODORO_SEGMENT_TYPE_POMODORO = 0, POMODORO_SEGMENT_TYPE_BREAK, POMODORO_SEGMENT_TYPE_COUNT } SegmentType; // Defines a type to hold a pomodoro technique segment. typedef struct { Interval interval; bool restart_on_abort; } Segment; // Defines a structure type to hold the pomodoro technique segment sequence. typedef struct { Segment* this_segment; Segment segments[POMODORO_SEGMENT_TYPE_COUNT]; } Pomodoro; // Initializes the pomodoro technique structure. void pomodoro_init(Pomodoro* pomodoro); // Completes the current pomodoro technique segment and causes an advance to // the next appropriate segment in the pomodoro technique sequence. void pomodoro_complete_segment(Pomodoro* pomodoro); // Aborts the current pomodoro tecnique segment and causes an advance to the // next appropriate segment in the pomodoro technique sequence. void pomodoro_abort_segment(Pomodoro* pomodoro); ## Instruction: Fix some comments and type names ## Code After: // ---------------------------------------------------------------------------- // pomodoro - Defines a model to track progress through the Pomodoro Technique // Copyright (c) 2013 Jonathan Speicher ([email protected]) // Licensed under the MIT license: http://opensource.org/licenses/MIT // ---------------------------------------------------------------------------- #pragma once #include <stdbool.h> // Defines a type to hold the various pomodoro technique segment types. typedef enum { POMODORO_SEGMENT_TYPE_POMODORO = 0, POMODORO_SEGMENT_TYPE_BREAK, POMODORO_SEGMENT_TYPE_COUNT } PomodoroSegmentType; // Defines a type to hold a pomodoro technique segment. typedef struct { Interval interval; bool restart_on_abort; } PomodoroSegment; // Defines a type to hold the pomodoro technique segment sequence. typedef struct { PomodoroSegment* this_segment; PomodoroSegment segments[POMODORO_SEGMENT_TYPE_COUNT]; } Pomodoro; // Initializes the pomodoro technique sequence structure. void pomodoro_init(Pomodoro* pomodoro); // Completes the current pomodoro technique segment and causes an advance to // the next appropriate segment in the pomodoro technique sequence. void pomodoro_complete_segment(Pomodoro* pomodoro); // Aborts the current pomodoro tecnique segment and causes an advance to the // next appropriate segment in the pomodoro technique sequence. void pomodoro_abort_segment(Pomodoro* pomodoro);
... POMODORO_SEGMENT_TYPE_POMODORO = 0, POMODORO_SEGMENT_TYPE_BREAK, POMODORO_SEGMENT_TYPE_COUNT } PomodoroSegmentType; // Defines a type to hold a pomodoro technique segment. ... typedef struct { Interval interval; bool restart_on_abort; } PomodoroSegment; // Defines a type to hold the pomodoro technique segment sequence. typedef struct { PomodoroSegment* this_segment; PomodoroSegment segments[POMODORO_SEGMENT_TYPE_COUNT]; } Pomodoro; // Initializes the pomodoro technique sequence structure. void pomodoro_init(Pomodoro* pomodoro); ...
25ba377b7254ed770360bb1ee5a6ef6cb631f564
openedx/stanford/djangoapps/register_cme/admin.py
openedx/stanford/djangoapps/register_cme/admin.py
from __future__ import unicode_literals from django.contrib import admin from .models import ExtraInfo class ExtraInfoAdmin(admin.ModelAdmin): """ Admin interface for ExtraInfo model. """ readonly_fields = ( 'user', ) class Meta(object): model = ExtraInfo admin.site.register(ExtraInfo, ExtraInfoAdmin)
from __future__ import unicode_literals from django.contrib import admin from .models import ExtraInfo class ExtraInfoAdmin(admin.ModelAdmin): """ Admin interface for ExtraInfo model. """ list_display = ( 'user', 'get_email', 'last_name', 'first_name', ) readonly_fields = ( 'user', ) search_fields = ( 'user__username', 'user__email', 'last_name', 'first_name', ) def get_email(self, obj): return obj.user.email get_email.short_description = 'Email address' class Meta(object): model = ExtraInfo admin.site.register(ExtraInfo, ExtraInfoAdmin)
Make ExtraInfo list user-friendly in Django Admin
Make ExtraInfo list user-friendly in Django Admin `Register_cme/extrainfo` in Django Admin was previously displaying users as `ExtraInfo` objects which admins had to click on individually to see each user's information. Each user is now displayed with fields: username, email, last and first name. Username is clickable to view more information. Added search bar enables search for users matching query for username, email, last and first name.
Python
agpl-3.0
Stanford-Online/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform
python
## Code Before: from __future__ import unicode_literals from django.contrib import admin from .models import ExtraInfo class ExtraInfoAdmin(admin.ModelAdmin): """ Admin interface for ExtraInfo model. """ readonly_fields = ( 'user', ) class Meta(object): model = ExtraInfo admin.site.register(ExtraInfo, ExtraInfoAdmin) ## Instruction: Make ExtraInfo list user-friendly in Django Admin `Register_cme/extrainfo` in Django Admin was previously displaying users as `ExtraInfo` objects which admins had to click on individually to see each user's information. Each user is now displayed with fields: username, email, last and first name. Username is clickable to view more information. Added search bar enables search for users matching query for username, email, last and first name. ## Code After: from __future__ import unicode_literals from django.contrib import admin from .models import ExtraInfo class ExtraInfoAdmin(admin.ModelAdmin): """ Admin interface for ExtraInfo model. """ list_display = ( 'user', 'get_email', 'last_name', 'first_name', ) readonly_fields = ( 'user', ) search_fields = ( 'user__username', 'user__email', 'last_name', 'first_name', ) def get_email(self, obj): return obj.user.email get_email.short_description = 'Email address' class Meta(object): model = ExtraInfo admin.site.register(ExtraInfo, ExtraInfoAdmin)
... Admin interface for ExtraInfo model. """ list_display = ( 'user', 'get_email', 'last_name', 'first_name', ) readonly_fields = ( 'user', ) search_fields = ( 'user__username', 'user__email', 'last_name', 'first_name', ) def get_email(self, obj): return obj.user.email get_email.short_description = 'Email address' class Meta(object): model = ExtraInfo ...
f43519e2fc6faf9956febcf61185c789454a4f0f
personal_website/models.py
personal_website/models.py
from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
Fix for tests failing because of persisted database file for the next build
Fix for tests failing because of persisted database file for the next build
Python
mit
tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website
python
## Code Before: from sqlalchemy_wrapper import SQLAlchemy db = SQLAlchemy(uri='sqlite:///intermediate_data.db') class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all() ## Instruction: Fix for tests failing because of persisted database file for the next build ## Code After: import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): __tablename__ = 'blog_post' id = db.Column(db.Integer, primary_key=True, autoincrement=True) src_url = db.Column(db.String(128), unique=True) dest_url = db.Column(db.String(128), unique=True) blog_title = db.Column(db.String(64)) blog_description = db.Column(db.String(256)) date = db.Column(db.String(10)) # to store yyyy-mm-dd def __repr__(self): return str(dict( id=self.id, src_url=self.src_url, dest_url=self.dest_url, blog_title=self.blog_title, blog_description=self.blog_description, date=self.date, )) db.create_all()
... import os from sqlalchemy_wrapper import SQLAlchemy from constants import DATABASE_NAME os.system('rm -f ' + DATABASE_NAME) db = SQLAlchemy(uri='sqlite:///' + DATABASE_NAME) class BlogPostModel(db.Model): ...
cf0850e23b07c656bd2bc56c88f9119dc4142931
mooch/banktransfer.py
mooch/banktransfer.py
from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url)
from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def __init__(self, *, autocharge, **kw): self.autocharge = autocharge super(BankTransferMoocher, self).__init__(**kw) def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier if self.autocharge: instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url)
Allow disabling the autocharging behavior of the bank transfer moocher
Allow disabling the autocharging behavior of the bank transfer moocher
Python
mit
matthiask/django-mooch,matthiask/django-mooch,matthiask/django-mooch
python
## Code Before: from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url) ## Instruction: Allow disabling the autocharging behavior of the bank transfer moocher ## Code After: from django import http from django.conf.urls import url from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from mooch.base import BaseMoocher, require_POST_m from mooch.signals import post_charge class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def __init__(self, *, autocharge, **kw): self.autocharge = autocharge super(BankTransferMoocher, self).__init__(**kw) def get_urls(self): return [ url('^confirm/$', self.confirm_view, name='banktransfer_confirm'), ] def payment_form(self, request, payment): return render_to_string('mooch/banktransfer_payment_form.html', { 'payment': payment, 'moocher': self, }, request=request) @require_POST_m def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier if self.autocharge: instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() post_charge.send( sender=self.__class__, payment=instance, request=request, ) return http.HttpResponseRedirect(self.success_url)
// ... existing code ... class BankTransferMoocher(BaseMoocher): identifier = 'banktransfer' title = _('Pay by bank transfer') def __init__(self, *, autocharge, **kw): self.autocharge = autocharge super(BankTransferMoocher, self).__init__(**kw) def get_urls(self): return [ // ... modified code ... def confirm_view(self, request): instance = get_object_or_404(self.model, id=request.POST.get('id')) instance.payment_service_provider = self.identifier if self.autocharge: instance.charged_at = timezone.now() instance.transaction = repr(request.META.copy()) instance.save() // ... rest of the code ...
a0e835cbf382cb55ff872bb8d6cc57a5326a82de
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/validators.py
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/validators.py
from ckan.common import _ import ckan.lib.navl.dictization_functions as df def lower_if_exists(s): return s.lower() if s else s def upper_if_exists(s): return s.upper() if s else s def valid_resources(private, context): package = context.get('package') if not private or private == u'False': for resource in package.resources: if resource.extras.get('valid_content') == 'no': raise df.Invalid(_("Package contains invalid resources")) return private
from ckan.common import _ import ckan.lib.navl.dictization_functions as df def lower_if_exists(s): return s.lower() if s else s def upper_if_exists(s): return s.upper() if s else s def valid_resources(private, context): package = context.get('package') if package and (not private or private == u'False'): for resource in package.resources: if resource.extras.get('valid_content') == 'no': raise df.Invalid(_("Package contains invalid resources")) return private
Fix package resource validator for new packages
LK-271: Fix package resource validator for new packages
Python
mit
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
python
## Code Before: from ckan.common import _ import ckan.lib.navl.dictization_functions as df def lower_if_exists(s): return s.lower() if s else s def upper_if_exists(s): return s.upper() if s else s def valid_resources(private, context): package = context.get('package') if not private or private == u'False': for resource in package.resources: if resource.extras.get('valid_content') == 'no': raise df.Invalid(_("Package contains invalid resources")) return private ## Instruction: LK-271: Fix package resource validator for new packages ## Code After: from ckan.common import _ import ckan.lib.navl.dictization_functions as df def lower_if_exists(s): return s.lower() if s else s def upper_if_exists(s): return s.upper() if s else s def valid_resources(private, context): package = context.get('package') if package and (not private or private == u'False'): for resource in package.resources: if resource.extras.get('valid_content') == 'no': raise df.Invalid(_("Package contains invalid resources")) return private
... def valid_resources(private, context): package = context.get('package') if package and (not private or private == u'False'): for resource in package.resources: if resource.extras.get('valid_content') == 'no': raise df.Invalid(_("Package contains invalid resources")) ...
8d544103d08b17a48dc9d424db4498184e10d8a3
tweepy/asynchronous/__init__.py
tweepy/asynchronous/__init__.py
try: import aiohttp import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp and oauthlib to be installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient
try: import aiohttp import async_lru import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be " "installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient
Check for async_lru when importing asynchronous subpackage
Check for async_lru when importing asynchronous subpackage
Python
mit
svven/tweepy,tweepy/tweepy
python
## Code Before: try: import aiohttp import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp and oauthlib to be installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient ## Instruction: Check for async_lru when importing asynchronous subpackage ## Code After: try: import aiohttp import async_lru import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be " "installed" ) from tweepy.asynchronous.streaming import AsyncStream from tweepy.asynchronous.client import AsyncClient
# ... existing code ... try: import aiohttp import async_lru import oauthlib except ModuleNotFoundError: from tweepy.errors import TweepyException raise TweepyException( "tweepy.asynchronous requires aiohttp, async_lru, and oauthlib to be " "installed" ) from tweepy.asynchronous.streaming import AsyncStream # ... rest of the code ...
094e6624ffd9ed6bdf0a0ca3e30c39003bd2204b
cream-it/cream-it-entity/src/test/java/fr/liglab/adele/cream/it/entity/test/EntityBaseCommonConfig.java
cream-it/cream-it-entity/src/test/java/fr/liglab/adele/cream/it/entity/test/EntityBaseCommonConfig.java
package fr.liglab.adele.cream.it.entity.test; import fr.liglab.adele.cream.testing.helpers.ContextBaseTest; import java.util.Arrays; import java.util.List; /** * Created by aygalinc on 26/08/16. */ public abstract class EntityBaseCommonConfig extends ContextBaseTest { @Override protected List<String> getExtraExports() { return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation" //By convention services package are exported by the base test ); } @Override public boolean deployTestBundle() { return true; } }
package fr.liglab.adele.cream.it.entity.test; import fr.liglab.adele.cream.testing.helpers.ContextBaseTest; import java.util.Arrays; import java.util.List; /** * Created by aygalinc on 26/08/16. */ public abstract class EntityBaseCommonConfig extends ContextBaseTest { @Override protected List<String> getExtraExports() { if ("Linux".equalsIgnoreCase(System.getProperty("os.name"))){ return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation" //By convention services package are exported by the base test ); } else { return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation", "fr.liglab.adele.cream.it.entity.services" //By convention services package are exported by the base test but FAILED on windows.... ); } } @Override public boolean deployTestBundle() { return true; } }
Add extra export package due to windows
Add extra export package due to windows
Java
apache-2.0
aygalinc/Context-SOCM,aygalinc/Context-SOCM
java
## Code Before: package fr.liglab.adele.cream.it.entity.test; import fr.liglab.adele.cream.testing.helpers.ContextBaseTest; import java.util.Arrays; import java.util.List; /** * Created by aygalinc on 26/08/16. */ public abstract class EntityBaseCommonConfig extends ContextBaseTest { @Override protected List<String> getExtraExports() { return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation" //By convention services package are exported by the base test ); } @Override public boolean deployTestBundle() { return true; } } ## Instruction: Add extra export package due to windows ## Code After: package fr.liglab.adele.cream.it.entity.test; import fr.liglab.adele.cream.testing.helpers.ContextBaseTest; import java.util.Arrays; import java.util.List; /** * Created by aygalinc on 26/08/16. */ public abstract class EntityBaseCommonConfig extends ContextBaseTest { @Override protected List<String> getExtraExports() { if ("Linux".equalsIgnoreCase(System.getProperty("os.name"))){ return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation" //By convention services package are exported by the base test ); } else { return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation", "fr.liglab.adele.cream.it.entity.services" //By convention services package are exported by the base test but FAILED on windows.... ); } } @Override public boolean deployTestBundle() { return true; } }
# ... existing code ... @Override protected List<String> getExtraExports() { if ("Linux".equalsIgnoreCase(System.getProperty("os.name"))){ return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation" //By convention services package are exported by the base test ); } else { return Arrays.asList( "fr.liglab.adele.cream.it.entity.synchronisation", "fr.liglab.adele.cream.it.entity.services" //By convention services package are exported by the base test but FAILED on windows.... ); } } # ... rest of the code ...
ec884c9db173f093d1398de54d00f1c36f22d8e4
examples/random_valid_test_generator.py
examples/random_valid_test_generator.py
import sys import time from random import shuffle from FairDistributor import FairDistributor def main(): # User input for the number of targets and objects. number_of_targets = int(sys.argv[1]) number_of_objects = int(sys.argv[2]) # Generate dummy lists for objects, targets and dummy matrix for weights targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)] objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)] dummy_weights = list(range(1, number_of_objects+1)) weights_matrix = list() for _ in range(number_of_targets): new_random_weight_list = list(dummy_weights) shuffle(new_random_weight_list) weights_matrix.append(new_random_weight_list) start_time = time.time() distributor = FairDistributor(targets, objects, weights_matrix) distributor.distribute() elapsed_time = time.time() - start_time print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format( number_of_targets, number_of_objects, elapsed_time)) if __name__ == '__main__': main()
import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. number_of_targets = int(sys.argv[1]) number_of_objects = int(sys.argv[2]) # Generate dummy lists for objects, targets and dummy matrix for weights targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)] objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)] dummy_weights = list(range(1, number_of_objects+1)) weights_matrix = list() for _ in range(number_of_targets): new_random_weight_list = list(dummy_weights) shuffle(new_random_weight_list) weights_matrix.append(new_random_weight_list) # Benchmark solver start_time = time.time() distributor = FairDistributor(targets, objects, weights_matrix) distributor.distribute() elapsed_time = time.time() - start_time # Output print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format( number_of_targets, number_of_objects, elapsed_time)) if __name__ == '__main__': main()
Reformat random generator reformat code
Reformat random generator reformat code
Python
mit
Hackathonners/vania
python
## Code Before: import sys import time from random import shuffle from FairDistributor import FairDistributor def main(): # User input for the number of targets and objects. number_of_targets = int(sys.argv[1]) number_of_objects = int(sys.argv[2]) # Generate dummy lists for objects, targets and dummy matrix for weights targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)] objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)] dummy_weights = list(range(1, number_of_objects+1)) weights_matrix = list() for _ in range(number_of_targets): new_random_weight_list = list(dummy_weights) shuffle(new_random_weight_list) weights_matrix.append(new_random_weight_list) start_time = time.time() distributor = FairDistributor(targets, objects, weights_matrix) distributor.distribute() elapsed_time = time.time() - start_time print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format( number_of_targets, number_of_objects, elapsed_time)) if __name__ == '__main__': main() ## Instruction: Reformat random generator reformat code ## Code After: import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): # User input for the number of targets and objects. number_of_targets = int(sys.argv[1]) number_of_objects = int(sys.argv[2]) # Generate dummy lists for objects, targets and dummy matrix for weights targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)] objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)] dummy_weights = list(range(1, number_of_objects+1)) weights_matrix = list() for _ in range(number_of_targets): new_random_weight_list = list(dummy_weights) shuffle(new_random_weight_list) weights_matrix.append(new_random_weight_list) # Benchmark solver start_time = time.time() distributor = FairDistributor(targets, objects, weights_matrix) distributor.distribute() elapsed_time = time.time() - start_time # Output print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format( number_of_targets, number_of_objects, elapsed_time)) if __name__ == '__main__': main()
... import sys import time from random import shuffle from vania.fair_distributor import FairDistributor def main(): ... # User input for the number of targets and objects. number_of_targets = int(sys.argv[1]) number_of_objects = int(sys.argv[2]) # Generate dummy lists for objects, targets and dummy matrix for weights targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)] ... new_random_weight_list = list(dummy_weights) shuffle(new_random_weight_list) weights_matrix.append(new_random_weight_list) # Benchmark solver start_time = time.time() distributor = FairDistributor(targets, objects, weights_matrix) distributor.distribute() elapsed_time = time.time() - start_time # Output print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format( number_of_targets, number_of_objects, elapsed_time)) ...
e16c65ec8c774cc27f9f7aa43e88521c3854b6b7
ella/imports/management/commands/fetchimports.py
ella/imports/management/commands/fetchimports.py
from django.core.management.base import BaseCommand from optparse import make_option class Command(BaseCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all fetch_all()
from django.core.management.base import NoArgsCommand from optparse import make_option import sys class Command(NoArgsCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all errors = fetch_all() if errors: sys.exit(errors)
Return exit code (count of errors)
Return exit code (count of errors) git-svn-id: 6ce22b13eace8fe533dbb322c2bb0986ea4cd3e6@520 2d143e24-0a30-0410-89d7-a2e95868dc81
Python
bsd-3-clause
MichalMaM/ella,MichalMaM/ella,WhiskeyMedia/ella,whalerock/ella,ella/ella,whalerock/ella,WhiskeyMedia/ella,petrlosa/ella,petrlosa/ella,whalerock/ella
python
## Code Before: from django.core.management.base import BaseCommand from optparse import make_option class Command(BaseCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all fetch_all() ## Instruction: Return exit code (count of errors) git-svn-id: 6ce22b13eace8fe533dbb322c2bb0986ea4cd3e6@520 2d143e24-0a30-0410-89d7-a2e95868dc81 ## Code After: from django.core.management.base import NoArgsCommand from optparse import make_option import sys class Command(NoArgsCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all errors = fetch_all() if errors: sys.exit(errors)
# ... existing code ... from django.core.management.base import NoArgsCommand from optparse import make_option import sys class Command(NoArgsCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all errors = fetch_all() if errors: sys.exit(errors) # ... rest of the code ...
012e5d7d80b20220a7a41f7f3488ebe468d6b661
setup.py
setup.py
from setuptools import setup, find_packages version = '0.2.0' setup( name='cmsplugin-plaintext', version=version, description='Adds a plaintext plugin for django-cms.', author='Xenofox, LLC', author_email='[email protected]', url='http://bitbucket.org/xenofox/cmsplugin-plaintext/', packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=[], )
from setuptools import setup, find_packages version = '0.2.1' setup( name='cmsplugin-plaintext-djangocms3', version=version, description='Adds a plaintext plugin for django-cms. Forked from https://bitbucket.org/xenofox/cmsplugin-plaintext to add django-cms3 support', author='Changer', author_email='[email protected]', url='http://bitbucket.org/changer/cmsplugin-plaintext/', packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=[], )
Change package name for pypi
Change package name for pypi
Python
bsd-3-clause
russmo/cmsplugin-plaintext,russmo/cmsplugin-plaintext
python
## Code Before: from setuptools import setup, find_packages version = '0.2.0' setup( name='cmsplugin-plaintext', version=version, description='Adds a plaintext plugin for django-cms.', author='Xenofox, LLC', author_email='[email protected]', url='http://bitbucket.org/xenofox/cmsplugin-plaintext/', packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=[], ) ## Instruction: Change package name for pypi ## Code After: from setuptools import setup, find_packages version = '0.2.1' setup( name='cmsplugin-plaintext-djangocms3', version=version, description='Adds a plaintext plugin for django-cms. Forked from https://bitbucket.org/xenofox/cmsplugin-plaintext to add django-cms3 support', author='Changer', author_email='[email protected]', url='http://bitbucket.org/changer/cmsplugin-plaintext/', packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=[], )
// ... existing code ... from setuptools import setup, find_packages version = '0.2.1' setup( name='cmsplugin-plaintext-djangocms3', version=version, description='Adds a plaintext plugin for django-cms. Forked from https://bitbucket.org/xenofox/cmsplugin-plaintext to add django-cms3 support', author='Changer', author_email='[email protected]', url='http://bitbucket.org/changer/cmsplugin-plaintext/', packages=find_packages(), zip_safe=False, include_package_data=True, // ... rest of the code ...
75488d5639819f31b01f76433b82d6259323230d
unix-cpy/mpconfigport.h
unix-cpy/mpconfigport.h
// options to control how Micro Python is built #define MICROPY_EMIT_CPYTHON (1) #define MICROPY_ENABLE_LEXER_UNIX (1) #define MICROPY_ENABLE_FLOAT (1) // type definitions for the specific machine #ifdef __LP64__ typedef long machine_int_t; // must be pointer size typedef unsigned long machine_uint_t; // must be pointer size #else // These are definitions for machines where sizeof(int) == sizeof(void*), // regardless for actual size. typedef int machine_int_t; // must be pointer size typedef unsigned int machine_uint_t; // must be pointer size #endif #define BYTES_PER_WORD sizeof(machine_int_t) typedef void *machine_ptr_t; // must be of pointer size typedef const void *machine_const_ptr_t; // must be of pointer size typedef double machine_float_t; machine_float_t machine_sqrt(machine_float_t x);
// options to control how Micro Python is built #define MICROPY_EMIT_CPYTHON (1) #define MICROPY_ENABLE_LEXER_UNIX (1) #define MICROPY_FLOAT_IMPL (MICROPY_FLOAT_IMPL_DOUBLE) // type definitions for the specific machine #ifdef __LP64__ typedef long machine_int_t; // must be pointer size typedef unsigned long machine_uint_t; // must be pointer size #else // These are definitions for machines where sizeof(int) == sizeof(void*), // regardless for actual size. typedef int machine_int_t; // must be pointer size typedef unsigned int machine_uint_t; // must be pointer size #endif #define BYTES_PER_WORD sizeof(machine_int_t) typedef void *machine_ptr_t; // must be of pointer size typedef const void *machine_const_ptr_t; // must be of pointer size typedef double machine_float_t; machine_float_t machine_sqrt(machine_float_t x);
Switch to use MICROPY_FLOAT_IMPL config define.
unix-cpy: Switch to use MICROPY_FLOAT_IMPL config define.
C
mit
micropython/micropython-esp32,SHA2017-badge/micropython-esp32,torwag/micropython,AriZuu/micropython,xhat/micropython,utopiaprince/micropython,AriZuu/micropython,ahotam/micropython,heisewangluo/micropython,firstval/micropython,hiway/micropython,cloudformdesign/micropython,mianos/micropython,kostyll/micropython,Timmenem/micropython,xuxiaoxin/micropython,matthewelse/micropython,rubencabrera/micropython,Vogtinator/micropython,bvernoux/micropython,infinnovation/micropython,deshipu/micropython,cnoviello/micropython,henriknelson/micropython,omtinez/micropython,adamkh/micropython,lowRISC/micropython,mgyenik/micropython,martinribelotta/micropython,infinnovation/micropython,kerneltask/micropython,trezor/micropython,dxxb/micropython,ryannathans/micropython,mianos/micropython,blazewicz/micropython,tuc-osg/micropython,methoxid/micropystat,lowRISC/micropython,rubencabrera/micropython,utopiaprince/micropython,ericsnowcurrently/micropython,pfalcon/micropython,torwag/micropython,ganshun666/micropython,xyb/micropython,blmorris/micropython,hosaka/micropython,adafruit/micropython,firstval/micropython,ChuckM/micropython,feilongfl/micropython,orionrobots/micropython,SungEun-Steve-Kim/test-mp,paul-xxx/micropython,adamkh/micropython,Peetz0r/micropython-esp32,ceramos/micropython,aitjcize/micropython,martinribelotta/micropython,micropython/micropython-esp32,KISSMonX/micropython,rubencabrera/micropython,dmazzella/micropython,mpalomer/micropython,supergis/micropython,ceramos/micropython,ChuckM/micropython,danicampora/micropython,firstval/micropython,cwyark/micropython,dhylands/micropython,slzatz/micropython,Timmenem/micropython,ceramos/micropython,noahwilliamsson/micropython,cwyark/micropython,stonegithubs/micropython,suda/micropython,mpalomer/micropython,ChuckM/micropython,blmorris/micropython,MrSurly/micropython-esp32,methoxid/micropystat,blazewicz/micropython,oopy/micropython,EcmaXp/micropython,AriZuu/micropython,dmazzella/micropython,feilongfl/micropython,bvernoux/micropython,jlillest/micropython,tdautc19841202/micropython,jlillest/micropython,trezor/micropython,stonegithubs/micropython,redbear/micropython,hosaka/micropython,ganshun666/micropython,warner83/micropython,neilh10/micropython,lbattraw/micropython,ahotam/micropython,skybird6672/micropython,bvernoux/micropython,HenrikSolver/micropython,oopy/micropython,kostyll/micropython,aethaniel/micropython,galenhz/micropython,MrSurly/micropython-esp32,paul-xxx/micropython,torwag/micropython,matthewelse/micropython,vriera/micropython,PappaPeppar/micropython,pozetroninc/micropython,selste/micropython,MrSurly/micropython,ruffy91/micropython,tuc-osg/micropython,jmarcelino/pycom-micropython,danicampora/micropython,chrisdearman/micropython,heisewangluo/micropython,ernesto-g/micropython,praemdonck/micropython,swegener/micropython,ahotam/micropython,pfalcon/micropython,alex-robbins/micropython,redbear/micropython,drrk/micropython,infinnovation/micropython,cloudformdesign/micropython,henriknelson/micropython,Vogtinator/micropython,adamkh/micropython,torwag/micropython,TDAbboud/micropython,dinau/micropython,turbinenreiter/micropython,martinribelotta/micropython,adafruit/micropython,redbear/micropython,hosaka/micropython,SungEun-Steve-Kim/test-mp,MrSurly/micropython-esp32,SungEun-Steve-Kim/test-mp,dmazzella/micropython,adafruit/circuitpython,tralamazza/micropython,henriknelson/micropython,jimkmc/micropython,SHA2017-badge/micropython-esp32,mpalomer/micropython,methoxid/micropystat,KISSMonX/micropython,jlillest/micropython,galenhz/micropython,alex-march/micropython,tobbad/micropy
thon,kerneltask/micropython,drrk/micropython,aethaniel/micropython,dinau/micropython,pramasoul/micropython,alex-march/micropython,tobbad/micropython,xuxiaoxin/micropython,vitiral/micropython,deshipu/micropython,alex-robbins/micropython,selste/micropython,Timmenem/micropython,tdautc19841202/micropython,mgyenik/micropython,feilongfl/micropython,lowRISC/micropython,ryannathans/micropython,cwyark/micropython,micropython/micropython-esp32,EcmaXp/micropython,noahwilliamsson/micropython,mhoffma/micropython,ahotam/micropython,adafruit/circuitpython,skybird6672/micropython,mianos/micropython,PappaPeppar/micropython,ernesto-g/micropython,omtinez/micropython,aitjcize/micropython,vitiral/micropython,chrisdearman/micropython,heisewangluo/micropython,cnoviello/micropython,swegener/micropython,ernesto-g/micropython,ryannathans/micropython,mpalomer/micropython,rubencabrera/micropython,mianos/micropython,mgyenik/micropython,emfcamp/micropython,neilh10/micropython,trezor/micropython,lowRISC/micropython,ericsnowcurrently/micropython,slzatz/micropython,xhat/micropython,jmarcelino/pycom-micropython,AriZuu/micropython,ruffy91/micropython,galenhz/micropython,ganshun666/micropython,dxxb/micropython,mpalomer/micropython,toolmacher/micropython,jimkmc/micropython,supergis/micropython,Vogtinator/micropython,pfalcon/micropython,ruffy91/micropython,bvernoux/micropython,omtinez/micropython,PappaPeppar/micropython,swegener/micropython,deshipu/micropython,paul-xxx/micropython,mhoffma/micropython,matthewelse/micropython,vriera/micropython,vitiral/micropython,ryannathans/micropython,xhat/micropython,alex-march/micropython,lbattraw/micropython,jmarcelino/pycom-micropython,tuc-osg/micropython,drrk/micropython,firstval/micropython,blazewicz/micropython,tdautc19841202/micropython,tralamazza/micropython,warner83/micropython,ernesto-g/micropython,misterdanb/micropython,aitjcize/micropython,deshipu/micropython,dhylands/micropython,xuxiaoxin/micropython,matthewelse/micropython,PappaPeppar/micropython,MrSurly/micropython,xyb/micropython,AriZuu/micropython,xyb/micropython,alex-robbins/micropython,puuu/micropython,ernesto-g/micropython,aethaniel/micropython,ganshun666/micropython,feilongfl/micropython,dhylands/micropython,tdautc19841202/micropython,MrSurly/micropython,suda/micropython,emfcamp/micropython,turbinenreiter/micropython,PappaPeppar/micropython,noahchense/micropython,MrSurly/micropython,dinau/micropython,warner83/micropython,noahchense/micropython,henriknelson/micropython,tobbad/micropython,noahchense/micropython,adamkh/micropython,dxxb/micropython,trezor/micropython,orionrobots/micropython,ceramos/micropython,misterdanb/micropython,adafruit/micropython,hosaka/micropython,infinnovation/micropython,martinribelotta/micropython,turbinenreiter/micropython,noahchense/micropython,SungEun-Steve-Kim/test-mp,cwyark/micropython,selste/micropython,dxxb/micropython,jlillest/micropython,tobbad/micropython,omtinez/micropython,ryannathans/micropython,dhylands/micropython,ericsnowcurrently/micropython,emfcamp/micropython,galenhz/micropython,swegener/micropython,feilongfl/micropython,adamkh/micropython,emfcamp/micropython,danicampora/micropython,pramasoul/micropython,vriera/micropython,praemdonck/micropython,dmazzella/micropython,xyb/micropython,noahwilliamsson/micropython,kostyll/micropython,suda/micropython,hiway/micropython,drrk/micropython,danicampora/micropython,alex-march/micropython,heisewangluo/micropython,tuc-osg/micropython,adafruit/micropython,pozetroninc/micropython,ruffy91/micropython,alex-robbins/micropython,ceramos/micropython,
KISSMonX/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,vriera/micropython,cloudformdesign/micropython,aethaniel/micropython,misterdanb/micropython,TDAbboud/micropython,tralamazza/micropython,turbinenreiter/micropython,orionrobots/micropython,firstval/micropython,pfalcon/micropython,praemdonck/micropython,misterdanb/micropython,dinau/micropython,SungEun-Steve-Kim/test-mp,supergis/micropython,Timmenem/micropython,chrisdearman/micropython,deshipu/micropython,misterdanb/micropython,blazewicz/micropython,skybird6672/micropython,mhoffma/micropython,warner83/micropython,adafruit/micropython,micropython/micropython-esp32,adafruit/circuitpython,suda/micropython,turbinenreiter/micropython,SHA2017-badge/micropython-esp32,neilh10/micropython,kerneltask/micropython,methoxid/micropystat,lbattraw/micropython,ruffy91/micropython,redbear/micropython,KISSMonX/micropython,bvernoux/micropython,jmarcelino/pycom-micropython,dhylands/micropython,kostyll/micropython,mianos/micropython,neilh10/micropython,toolmacher/micropython,lowRISC/micropython,alex-robbins/micropython,HenrikSolver/micropython,heisewangluo/micropython,supergis/micropython,selste/micropython,warner83/micropython,praemdonck/micropython,utopiaprince/micropython,EcmaXp/micropython,micropython/micropython-esp32,noahwilliamsson/micropython,mhoffma/micropython,blmorris/micropython,pramasoul/micropython,kerneltask/micropython,xuxiaoxin/micropython,henriknelson/micropython,slzatz/micropython,rubencabrera/micropython,cwyark/micropython,chrisdearman/micropython,galenhz/micropython,jmarcelino/pycom-micropython,orionrobots/micropython,oopy/micropython,ChuckM/micropython,suda/micropython,Timmenem/micropython,Vogtinator/micropython,puuu/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,toolmacher/micropython,oopy/micropython,neilh10/micropython,pozetroninc/micropython,adafruit/circuitpython,jimkmc/micropython,jlillest/micropython,paul-xxx/micropython,swegener/micropython,lbattraw/micropython,cloudformdesign/micropython,hiway/micropython,emfcamp/micropython,EcmaXp/micropython,cnoviello/micropython,trezor/micropython,lbattraw/micropython,Peetz0r/micropython-esp32,xuxiaoxin/micropython,pramasoul/micropython,drrk/micropython,puuu/micropython,dinau/micropython,MrSurly/micropython,Peetz0r/micropython-esp32,cloudformdesign/micropython,vitiral/micropython,blmorris/micropython,jimkmc/micropython,hosaka/micropython,omtinez/micropython,matthewelse/micropython,cnoviello/micropython,hiway/micropython,aitjcize/micropython,methoxid/micropystat,tdautc19841202/micropython,ahotam/micropython,cnoviello/micropython,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,mhoffma/micropython,martinribelotta/micropython,matthewelse/micropython,noahwilliamsson/micropython,oopy/micropython,ChuckM/micropython,puuu/micropython,KISSMonX/micropython,tuc-osg/micropython,mgyenik/micropython,xyb/micropython,adafruit/circuitpython,praemdonck/micropython,ericsnowcurrently/micropython,redbear/micropython,HenrikSolver/micropython,noahchense/micropython,utopiaprince/micropython,aethaniel/micropython,orionrobots/micropython,dxxb/micropython,mgyenik/micropython,utopiaprince/micropython,slzatz/micropython,hiway/micropython,pramasoul/micropython,toolmacher/micropython,puuu/micropython,vitiral/micropython,blazewicz/micropython,danicampora/micropython,supergis/micropython,blmorris/micropython,ganshun666/micropython,TDAbboud/micropython,Vogtinator/micropython,tobbad/micropython,Peetz0r/micropython-esp32,stonegithubs/micropython,xhat/micropyt
hon,torwag/micropython,TDAbboud/micropython,toolmacher/micropython,jimkmc/micropython,pfalcon/micropython,pozetroninc/micropython,adafruit/circuitpython,TDAbboud/micropython,ericsnowcurrently/micropython,MrSurly/micropython-esp32,kostyll/micropython,paul-xxx/micropython,infinnovation/micropython,skybird6672/micropython,vriera/micropython,alex-march/micropython,stonegithubs/micropython,EcmaXp/micropython,xhat/micropython,selste/micropython,tralamazza/micropython,slzatz/micropython,kerneltask/micropython,skybird6672/micropython,stonegithubs/micropython,pozetroninc/micropython
c
## Code Before: // options to control how Micro Python is built #define MICROPY_EMIT_CPYTHON (1) #define MICROPY_ENABLE_LEXER_UNIX (1) #define MICROPY_ENABLE_FLOAT (1) // type definitions for the specific machine #ifdef __LP64__ typedef long machine_int_t; // must be pointer size typedef unsigned long machine_uint_t; // must be pointer size #else // These are definitions for machines where sizeof(int) == sizeof(void*), // regardless for actual size. typedef int machine_int_t; // must be pointer size typedef unsigned int machine_uint_t; // must be pointer size #endif #define BYTES_PER_WORD sizeof(machine_int_t) typedef void *machine_ptr_t; // must be of pointer size typedef const void *machine_const_ptr_t; // must be of pointer size typedef double machine_float_t; machine_float_t machine_sqrt(machine_float_t x); ## Instruction: unix-cpy: Switch to use MICROPY_FLOAT_IMPL config define. ## Code After: // options to control how Micro Python is built #define MICROPY_EMIT_CPYTHON (1) #define MICROPY_ENABLE_LEXER_UNIX (1) #define MICROPY_FLOAT_IMPL (MICROPY_FLOAT_IMPL_DOUBLE) // type definitions for the specific machine #ifdef __LP64__ typedef long machine_int_t; // must be pointer size typedef unsigned long machine_uint_t; // must be pointer size #else // These are definitions for machines where sizeof(int) == sizeof(void*), // regardless for actual size. typedef int machine_int_t; // must be pointer size typedef unsigned int machine_uint_t; // must be pointer size #endif #define BYTES_PER_WORD sizeof(machine_int_t) typedef void *machine_ptr_t; // must be of pointer size typedef const void *machine_const_ptr_t; // must be of pointer size typedef double machine_float_t; machine_float_t machine_sqrt(machine_float_t x);
... #define MICROPY_EMIT_CPYTHON (1) #define MICROPY_ENABLE_LEXER_UNIX (1) #define MICROPY_FLOAT_IMPL (MICROPY_FLOAT_IMPL_DOUBLE) // type definitions for the specific machine ...
24c686521b4cb6e1d3cb7537322102faa660f705
app/src/main/java/de/christinecoenen/code/zapp/app/mediathek/controller/DownloadReceiver.java
app/src/main/java/de/christinecoenen/code/zapp/app/mediathek/controller/DownloadReceiver.java
package de.christinecoenen.code.zapp.app.mediathek.controller; import android.app.DownloadManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class DownloadReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (DownloadManager.ACTION_NOTIFICATION_CLICKED.equals(action)) { context.startActivity(new Intent(DownloadManager.ACTION_VIEW_DOWNLOADS)); } } }
package de.christinecoenen.code.zapp.app.mediathek.controller; import android.app.DownloadManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class DownloadReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (DownloadManager.ACTION_NOTIFICATION_CLICKED.equals(action)) { Intent downloadManagerIntent = new Intent(DownloadManager.ACTION_VIEW_DOWNLOADS); downloadManagerIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(downloadManagerIntent); } } }
Fix crash on api level 23 when clicking download notification
Fix crash on api level 23 when clicking download notification
Java
mit
cemrich/zapp
java
## Code Before: package de.christinecoenen.code.zapp.app.mediathek.controller; import android.app.DownloadManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class DownloadReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (DownloadManager.ACTION_NOTIFICATION_CLICKED.equals(action)) { context.startActivity(new Intent(DownloadManager.ACTION_VIEW_DOWNLOADS)); } } } ## Instruction: Fix crash on api level 23 when clicking download notification ## Code After: package de.christinecoenen.code.zapp.app.mediathek.controller; import android.app.DownloadManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class DownloadReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (DownloadManager.ACTION_NOTIFICATION_CLICKED.equals(action)) { Intent downloadManagerIntent = new Intent(DownloadManager.ACTION_VIEW_DOWNLOADS); downloadManagerIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(downloadManagerIntent); } } }
... String action = intent.getAction(); if (DownloadManager.ACTION_NOTIFICATION_CLICKED.equals(action)) { Intent downloadManagerIntent = new Intent(DownloadManager.ACTION_VIEW_DOWNLOADS); downloadManagerIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(downloadManagerIntent); } } } ...
c10120f701ad1951eddbbe85b5dd06579294a980
MediaManagementSystem/src/main/java/com/csci4448/MediaManagementSystem/model/media/MediaDAO.java
MediaManagementSystem/src/main/java/com/csci4448/MediaManagementSystem/model/media/MediaDAO.java
package com.csci4448.MediaManagementSystem.model.media; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.SessionFactory; import org.hibernate.cfg.Configuration; public class MediaDAO { private SessionFactory sessionFactory; public MediaDAO() { sessionFactory = new Configuration().configure().buildSessionFactory(); } }
package com.csci4448.MediaManagementSystem.model.media; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.SessionFactory; import org.hibernate.cfg.Configuration; public class MediaDAO { private SessionFactory sessionFactory; public MediaDAO() { sessionFactory = new Configuration().configure().buildSessionFactory(); } //ToDo: System inventory stuff, basic add/edit/delete of media, and waitlist of rentable media }
Add ToDo stuff for media DAO
Add ToDo stuff for media DAO
Java
mit
tylersco/MediaManagementSystem
java
## Code Before: package com.csci4448.MediaManagementSystem.model.media; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.SessionFactory; import org.hibernate.cfg.Configuration; public class MediaDAO { private SessionFactory sessionFactory; public MediaDAO() { sessionFactory = new Configuration().configure().buildSessionFactory(); } } ## Instruction: Add ToDo stuff for media DAO ## Code After: package com.csci4448.MediaManagementSystem.model.media; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.SessionFactory; import org.hibernate.cfg.Configuration; public class MediaDAO { private SessionFactory sessionFactory; public MediaDAO() { sessionFactory = new Configuration().configure().buildSessionFactory(); } //ToDo: System inventory stuff, basic add/edit/delete of media, and waitlist of rentable media }
# ... existing code ... sessionFactory = new Configuration().configure().buildSessionFactory(); } //ToDo: System inventory stuff, basic add/edit/delete of media, and waitlist of rentable media } # ... rest of the code ...
5abb4d9b5bfe88e9617839f5558e5b31dbf02f5b
19-getBlockHits.py
19-getBlockHits.py
from mcpi.minecraft import * from mcpi.block import * from blockData import * # this means that the file can be imported without executing anything in this code block if __name__ == "__main__": """ First thing you do is create a connection to minecraft This is like dialling a phone. It sets up a communication line between your script and the minecraft world """ # Create a connection to Minecraft # Any communication with the world must use this object mc = Minecraft.create() # Get the current tile/block that the player is located at in the world playerPosition = mc.player.getTilePos() while(True): hits = mc.events.pollBlockHits() if len(hits) > 0: print hits
from mcpi.minecraft import * from mcpi.block import * from blockData import * # this means that the file can be imported without executing anything in this code block if __name__ == "__main__": """ First thing you do is create a connection to minecraft This is like dialling a phone. It sets up a communication line between your script and the minecraft world """ # Create a connection to Minecraft # Any communication with the world must use this object mc = Minecraft.create() while(True): hits = mc.events.pollBlockHits() if len(hits) > 0: print hits
Remove code that is not used
Remove code that is not used Function call not required so removed
Python
bsd-3-clause
hashbangstudio/Python-Minecraft-Examples
python
## Code Before: from mcpi.minecraft import * from mcpi.block import * from blockData import * # this means that the file can be imported without executing anything in this code block if __name__ == "__main__": """ First thing you do is create a connection to minecraft This is like dialling a phone. It sets up a communication line between your script and the minecraft world """ # Create a connection to Minecraft # Any communication with the world must use this object mc = Minecraft.create() # Get the current tile/block that the player is located at in the world playerPosition = mc.player.getTilePos() while(True): hits = mc.events.pollBlockHits() if len(hits) > 0: print hits ## Instruction: Remove code that is not used Function call not required so removed ## Code After: from mcpi.minecraft import * from mcpi.block import * from blockData import * # this means that the file can be imported without executing anything in this code block if __name__ == "__main__": """ First thing you do is create a connection to minecraft This is like dialling a phone. It sets up a communication line between your script and the minecraft world """ # Create a connection to Minecraft # Any communication with the world must use this object mc = Minecraft.create() while(True): hits = mc.events.pollBlockHits() if len(hits) > 0: print hits
# ... existing code ... # Any communication with the world must use this object mc = Minecraft.create() while(True): hits = mc.events.pollBlockHits() if len(hits) > 0: # ... rest of the code ...
9698919d35fe7ffdc9e4e4e88de50692d115be10
Sources/include/HTMLNode+Private.h
Sources/include/HTMLNode+Private.h
// // HTMLNode+Private.h // HTMLKit // // Created by Iska on 20/12/15. // Copyright © 2015 BrainCookie. All rights reserved. // ///------------------------------------------------------ /// HTMLKit private header ///------------------------------------------------------ #import <HTMLKit/HTMLKit.h> /** Private HTML Node methods which are not intended for public API. */ @interface HTMLNode () /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLDocument *ownerDocument; /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLNode *parentNode; /** Designated initializer of the HTML Node, which, however, should not be used directly. It is intended to be called only by subclasses. @abstract Use concrete subclasses of the HTML Node. @param name The node's name. @param type The node's type. @return A new instance of a HTML Node. */ - (instancetype)initWithName:(NSString *)name type:(HTMLNodeType)type NS_DESIGNATED_INITIALIZER; /** Casts this node to a HTML Element. This cast should only be performed after the appropriate check. */ - (HTMLElement *)asElement; /** Returns the same string representation of the DOM tree rooted at this node that is used by html5lib-tests. @disucssion This method is indended for testing purposes. */ - (NSString *)treeDescription; @end
// // HTMLNode+Private.h // HTMLKit // // Created by Iska on 20/12/15. // Copyright © 2015 BrainCookie. All rights reserved. // ///------------------------------------------------------ /// HTMLKit private header ///------------------------------------------------------ #import "HTMLNode.h" /** Private HTML Node methods which are not intended for public API. */ @interface HTMLNode () /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLDocument *ownerDocument; /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLNode *parentNode; /** Designated initializer of the HTML Node, which, however, should not be used directly. It is intended to be called only by subclasses. @abstract Use concrete subclasses of the HTML Node. @param name The node's name. @param type The node's type. @return A new instance of a HTML Node. */ - (instancetype)initWithName:(NSString *)name type:(HTMLNodeType)type NS_DESIGNATED_INITIALIZER; /** Casts this node to a HTML Element. This cast should only be performed after the appropriate check. */ - (HTMLElement *)asElement; /** Returns the same string representation of the DOM tree rooted at this node that is used by html5lib-tests. @disucssion This method is indended for testing purposes. */ - (NSString *)treeDescription; @end
Fix import in private HTMLNode header
Fix import in private HTMLNode header
C
mit
iabudiab/HTMLKit,iabudiab/HTMLKit,iabudiab/HTMLKit,iabudiab/HTMLKit,iabudiab/HTMLKit
c
## Code Before: // // HTMLNode+Private.h // HTMLKit // // Created by Iska on 20/12/15. // Copyright © 2015 BrainCookie. All rights reserved. // ///------------------------------------------------------ /// HTMLKit private header ///------------------------------------------------------ #import <HTMLKit/HTMLKit.h> /** Private HTML Node methods which are not intended for public API. */ @interface HTMLNode () /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLDocument *ownerDocument; /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLNode *parentNode; /** Designated initializer of the HTML Node, which, however, should not be used directly. It is intended to be called only by subclasses. @abstract Use concrete subclasses of the HTML Node. @param name The node's name. @param type The node's type. @return A new instance of a HTML Node. */ - (instancetype)initWithName:(NSString *)name type:(HTMLNodeType)type NS_DESIGNATED_INITIALIZER; /** Casts this node to a HTML Element. This cast should only be performed after the appropriate check. */ - (HTMLElement *)asElement; /** Returns the same string representation of the DOM tree rooted at this node that is used by html5lib-tests. @disucssion This method is indended for testing purposes. */ - (NSString *)treeDescription; @end ## Instruction: Fix import in private HTMLNode header ## Code After: // // HTMLNode+Private.h // HTMLKit // // Created by Iska on 20/12/15. // Copyright © 2015 BrainCookie. All rights reserved. // ///------------------------------------------------------ /// HTMLKit private header ///------------------------------------------------------ #import "HTMLNode.h" /** Private HTML Node methods which are not intended for public API. */ @interface HTMLNode () /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLDocument *ownerDocument; /** A read-write redeclaration of the same property in the public API. */ @property (nonatomic, weak) HTMLNode *parentNode; /** Designated initializer of the HTML Node, which, however, should not be used directly. It is intended to be called only by subclasses. @abstract Use concrete subclasses of the HTML Node. @param name The node's name. @param type The node's type. @return A new instance of a HTML Node. */ - (instancetype)initWithName:(NSString *)name type:(HTMLNodeType)type NS_DESIGNATED_INITIALIZER; /** Casts this node to a HTML Element. This cast should only be performed after the appropriate check. */ - (HTMLElement *)asElement; /** Returns the same string representation of the DOM tree rooted at this node that is used by html5lib-tests. @disucssion This method is indended for testing purposes. */ - (NSString *)treeDescription; @end
// ... existing code ... /// HTMLKit private header ///------------------------------------------------------ #import "HTMLNode.h" /** Private HTML Node methods which are not intended for public API. // ... rest of the code ...
a9c53bc97c0e62a959c1115ec61d0a28d71aac68
devtools/ci/update-versions.py
devtools/ci/update-versions.py
from __future__ import print_function import os import boto from boto.s3.key import Key import msmbuilder.version if msmbuilder.version.release: # The secret key is available as a secure environment variable # on travis-ci to push the build documentation to Amazon S3. AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID'] AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY'] BUCKET_NAME = 'msmbuilder.org' bucket_name = AWS_ACCESS_KEY_ID.lower() + '-' + BUCKET_NAME conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) bucket = conn.get_bucket(BUCKET_NAME) root = 'doc/_build' versions = json.load(urllib2.urlopen('http://www.msmbuilder.org/versions.json')) # new release so all the others are now old for i in xrange(len(versions)): versions[i]['latest'] = False versions.append({'version' : msmbuilder.version.short_version, 'latest' : True}) k = Key(bucket) k.key = 'versions.json' k.set_contents_from_string(json.dumps(versions)) else: print("This is not a release.")
from __future__ import print_function import os import pip import json from tempfile import NamedTemporaryFile import subprocess from msmbuilder import version from six.moves.urllib.request import urlopen if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()): raise ImportError('The s3cmd pacakge is required. try $ pip install s3cmd') URL = 'http://www.msmbuilder.org/versions.json' BUCKET_NAME = 'msmbuilder.org' if not version.release: print("This is not a release.") exit(0) versions = json.load(urlopen(URL)) # new release so all the others are now old for i in xrange(len(versions)): versions[i]['latest'] = False versions.append({ 'version': version.short_version, 'latest': True}) # The secret key is available as a secure environment variable # on travis-ci to push the build documentation to Amazon S3. with NamedTemporaryFile('w') as config, NamedTemporaryFile('w') as v: config.write('''[default] access_key = {AWS_ACCESS_KEY_ID} secret_key = {AWS_SECRET_ACCESS_KEY} '''.format(**os.environ)) json.dump(versions, v) config.flush() v.flush() template = ('s3cmd --config {config} ' 'put {vfile} s3://{bucket}/versions.json') cmd = template.format( config=config.name, vfile=v.name, bucket=BUCKET_NAME) subprocess.call(cmd.split())
Fix script for updating version dropdown
Fix script for updating version dropdown
Python
lgpl-2.1
mpharrigan/mixtape,brookehus/msmbuilder,peastman/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,dr-nate/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,peastman/msmbuilder,Eigenstate/msmbuilder,Eigenstate/msmbuilder,msultan/msmbuilder,msultan/msmbuilder,rmcgibbo/msmbuilder,msmbuilder/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,rmcgibbo/msmbuilder,Eigenstate/msmbuilder,stephenliu1989/msmbuilder,dotsdl/msmbuilder,peastman/msmbuilder,cxhernandez/msmbuilder,stephenliu1989/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,rafwiewiora/msmbuilder,brookehus/msmbuilder,rmcgibbo/msmbuilder,mpharrigan/mixtape,Eigenstate/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,cxhernandez/msmbuilder,peastman/msmbuilder,msmbuilder/msmbuilder,rafwiewiora/msmbuilder,dotsdl/msmbuilder,cxhernandez/msmbuilder,msultan/msmbuilder,brookehus/msmbuilder,dotsdl/msmbuilder,mpharrigan/mixtape,dotsdl/msmbuilder,rmcgibbo/msmbuilder,rafwiewiora/msmbuilder,stephenliu1989/msmbuilder,stephenliu1989/msmbuilder,msmbuilder/msmbuilder,dr-nate/msmbuilder,Eigenstate/msmbuilder,mpharrigan/mixtape,dr-nate/msmbuilder,brookehus/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,brookehus/msmbuilder
python
## Code Before: from __future__ import print_function import os import boto from boto.s3.key import Key import msmbuilder.version if msmbuilder.version.release: # The secret key is available as a secure environment variable # on travis-ci to push the build documentation to Amazon S3. AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID'] AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY'] BUCKET_NAME = 'msmbuilder.org' bucket_name = AWS_ACCESS_KEY_ID.lower() + '-' + BUCKET_NAME conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) bucket = conn.get_bucket(BUCKET_NAME) root = 'doc/_build' versions = json.load(urllib2.urlopen('http://www.msmbuilder.org/versions.json')) # new release so all the others are now old for i in xrange(len(versions)): versions[i]['latest'] = False versions.append({'version' : msmbuilder.version.short_version, 'latest' : True}) k = Key(bucket) k.key = 'versions.json' k.set_contents_from_string(json.dumps(versions)) else: print("This is not a release.") ## Instruction: Fix script for updating version dropdown ## Code After: from __future__ import print_function import os import pip import json from tempfile import NamedTemporaryFile import subprocess from msmbuilder import version from six.moves.urllib.request import urlopen if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()): raise ImportError('The s3cmd pacakge is required. try $ pip install s3cmd') URL = 'http://www.msmbuilder.org/versions.json' BUCKET_NAME = 'msmbuilder.org' if not version.release: print("This is not a release.") exit(0) versions = json.load(urlopen(URL)) # new release so all the others are now old for i in xrange(len(versions)): versions[i]['latest'] = False versions.append({ 'version': version.short_version, 'latest': True}) # The secret key is available as a secure environment variable # on travis-ci to push the build documentation to Amazon S3. with NamedTemporaryFile('w') as config, NamedTemporaryFile('w') as v: config.write('''[default] access_key = {AWS_ACCESS_KEY_ID} secret_key = {AWS_SECRET_ACCESS_KEY} '''.format(**os.environ)) json.dump(versions, v) config.flush() v.flush() template = ('s3cmd --config {config} ' 'put {vfile} s3://{bucket}/versions.json') cmd = template.format( config=config.name, vfile=v.name, bucket=BUCKET_NAME) subprocess.call(cmd.split())
... from __future__ import print_function import os import pip import json from tempfile import NamedTemporaryFile import subprocess from msmbuilder import version from six.moves.urllib.request import urlopen if not any(d.project_name == 's3cmd' for d in pip.get_installed_distributions()): raise ImportError('The s3cmd pacakge is required. try $ pip install s3cmd') URL = 'http://www.msmbuilder.org/versions.json' BUCKET_NAME = 'msmbuilder.org' if not version.release: print("This is not a release.") exit(0) versions = json.load(urlopen(URL)) # new release so all the others are now old for i in xrange(len(versions)): versions[i]['latest'] = False versions.append({ 'version': version.short_version, 'latest': True}) # The secret key is available as a secure environment variable # on travis-ci to push the build documentation to Amazon S3. with NamedTemporaryFile('w') as config, NamedTemporaryFile('w') as v: config.write('''[default] access_key = {AWS_ACCESS_KEY_ID} secret_key = {AWS_SECRET_ACCESS_KEY} '''.format(**os.environ)) json.dump(versions, v) config.flush() v.flush() template = ('s3cmd --config {config} ' 'put {vfile} s3://{bucket}/versions.json') cmd = template.format( config=config.name, vfile=v.name, bucket=BUCKET_NAME) subprocess.call(cmd.split()) ...
76efd79df4b996323872dbe48c39d8f21af6a09f
setup.py
setup.py
import setuptools import unittest def discover_tests(): test_loader = unittest.TestLoader() return test_loader.discover('.', pattern='*_test.py') setuptools.setup( name='mesonwrap', version='0.0.4', author='The Meson development team', license='Apache 2', url='https://github.com/mesonbuild/wrapweb', packages=['mesonwrap', 'wrapweb'], scripts=['mesonwrap.py'], test_suite='setup.discover_tests', )
import setuptools import unittest def discover_tests(): test_loader = unittest.TestLoader() return test_loader.discover('.', pattern='*_test.py') setuptools.setup( name='mesonwrap', version='0.0.4', author='The Meson development team', license='Apache 2', url='https://github.com/mesonbuild/wrapweb', packages=['mesonwrap', 'wrapweb'], package_data={ 'wrapweb': ['templates/*.html'], }, scripts=['mesonwrap.py'], test_suite='setup.discover_tests', )
Install templates with wrapweb package
Install templates with wrapweb package
Python
apache-2.0
mesonbuild/wrapweb,mesonbuild/wrapweb,mesonbuild/wrapweb
python
## Code Before: import setuptools import unittest def discover_tests(): test_loader = unittest.TestLoader() return test_loader.discover('.', pattern='*_test.py') setuptools.setup( name='mesonwrap', version='0.0.4', author='The Meson development team', license='Apache 2', url='https://github.com/mesonbuild/wrapweb', packages=['mesonwrap', 'wrapweb'], scripts=['mesonwrap.py'], test_suite='setup.discover_tests', ) ## Instruction: Install templates with wrapweb package ## Code After: import setuptools import unittest def discover_tests(): test_loader = unittest.TestLoader() return test_loader.discover('.', pattern='*_test.py') setuptools.setup( name='mesonwrap', version='0.0.4', author='The Meson development team', license='Apache 2', url='https://github.com/mesonbuild/wrapweb', packages=['mesonwrap', 'wrapweb'], package_data={ 'wrapweb': ['templates/*.html'], }, scripts=['mesonwrap.py'], test_suite='setup.discover_tests', )
# ... existing code ... license='Apache 2', url='https://github.com/mesonbuild/wrapweb', packages=['mesonwrap', 'wrapweb'], package_data={ 'wrapweb': ['templates/*.html'], }, scripts=['mesonwrap.py'], test_suite='setup.discover_tests', ) # ... rest of the code ...
11ec7ed43fbd5d6dae786f8320d1540080a55d57
tools/secret_key_generator.py
tools/secret_key_generator.py
from hashlib import md5, sha1 from base64 import urlsafe_b64encode as b64encode import random random.seed() def random_string(): """ Generate a random string (currently a random number as a string) """ return str(random.randint(0,100000)) def generate_key(max_length, data, encoder=b64encode, digester=md5): """ Generate a Base64-encoded 'random' key by hashing the data. data is a tuple of seeding values. Pass arbitrary encoder and digester for specific hashing and formatting of keys """ base = '' for arg in data: base += str(arg) key = encoder(digester(base).digest()) return key[:max_length] if __name__ == "__main__": print generate_key(40, (random_string(),))
import sys from hashlib import md5, sha1 from base64 import urlsafe_b64encode as b64encode import random random.seed() def random_string(): """ Generate a random string (currently a random number as a string) """ return str(random.randint(0,100000)) def generate_key(max_length, data, encoder=b64encode, digester=md5): """ Generate a Base64-encoded 'random' key by hashing the data. data is a tuple of seeding values. Pass arbitrary encoder and digester for specific hashing and formatting of keys """ base = '' for arg in data: base += str(arg) key = encoder(digester(base).digest()) return key[:max_length] if __name__ == "__main__": key = generate_key(40, (random_string(),)) if len(sys.argv) == 2: fp = open(sys.argv[1], 'w') fp.write("SECRET_KEY = \"%s\"\n" % key) fp.close() else: print key
Update script for init seahub_settings.py in Windows
Update script for init seahub_settings.py in Windows
Python
apache-2.0
madflow/seahub,madflow/seahub,miurahr/seahub,cloudcopy/seahub,miurahr/seahub,miurahr/seahub,Chilledheart/seahub,Chilledheart/seahub,miurahr/seahub,madflow/seahub,Chilledheart/seahub,cloudcopy/seahub,Chilledheart/seahub,madflow/seahub,cloudcopy/seahub,cloudcopy/seahub,Chilledheart/seahub,madflow/seahub
python
## Code Before: from hashlib import md5, sha1 from base64 import urlsafe_b64encode as b64encode import random random.seed() def random_string(): """ Generate a random string (currently a random number as a string) """ return str(random.randint(0,100000)) def generate_key(max_length, data, encoder=b64encode, digester=md5): """ Generate a Base64-encoded 'random' key by hashing the data. data is a tuple of seeding values. Pass arbitrary encoder and digester for specific hashing and formatting of keys """ base = '' for arg in data: base += str(arg) key = encoder(digester(base).digest()) return key[:max_length] if __name__ == "__main__": print generate_key(40, (random_string(),)) ## Instruction: Update script for init seahub_settings.py in Windows ## Code After: import sys from hashlib import md5, sha1 from base64 import urlsafe_b64encode as b64encode import random random.seed() def random_string(): """ Generate a random string (currently a random number as a string) """ return str(random.randint(0,100000)) def generate_key(max_length, data, encoder=b64encode, digester=md5): """ Generate a Base64-encoded 'random' key by hashing the data. data is a tuple of seeding values. Pass arbitrary encoder and digester for specific hashing and formatting of keys """ base = '' for arg in data: base += str(arg) key = encoder(digester(base).digest()) return key[:max_length] if __name__ == "__main__": key = generate_key(40, (random_string(),)) if len(sys.argv) == 2: fp = open(sys.argv[1], 'w') fp.write("SECRET_KEY = \"%s\"\n" % key) fp.close() else: print key
... import sys from hashlib import md5, sha1 from base64 import urlsafe_b64encode as b64encode import random ... return key[:max_length] if __name__ == "__main__": key = generate_key(40, (random_string(),)) if len(sys.argv) == 2: fp = open(sys.argv[1], 'w') fp.write("SECRET_KEY = \"%s\"\n" % key) fp.close() else: print key ...
b959783f7c8db26df03760bb03227ab49f1975ba
pywikibot/families/wikitech_family.py
pywikibot/families/wikitech_family.py
__version__ = '$Id$' from pywikibot import family # The Wikitech family class Family(family.Family): def __init__(self): super(Family, self).__init__() self.name = 'wikitech' self.langs = { 'en': 'wikitech.wikimedia.org', } def version(self, code): return '1.21wmf8' def scriptpath(self, code): return ''
__version__ = '$Id$' from pywikibot import family # The Wikitech family class Family(family.Family): def __init__(self): super(Family, self).__init__() self.name = 'wikitech' self.langs = { 'en': 'wikitech.wikimedia.org', } def version(self, code): return '1.21wmf8'
Remove overide of default scriptpath
Remove overide of default scriptpath git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6
Python
mit
legoktm/pywikipedia-rewrite
python
## Code Before: __version__ = '$Id$' from pywikibot import family # The Wikitech family class Family(family.Family): def __init__(self): super(Family, self).__init__() self.name = 'wikitech' self.langs = { 'en': 'wikitech.wikimedia.org', } def version(self, code): return '1.21wmf8' def scriptpath(self, code): return '' ## Instruction: Remove overide of default scriptpath git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@11370 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6 ## Code After: __version__ = '$Id$' from pywikibot import family # The Wikitech family class Family(family.Family): def __init__(self): super(Family, self).__init__() self.name = 'wikitech' self.langs = { 'en': 'wikitech.wikimedia.org', } def version(self, code): return '1.21wmf8'
// ... existing code ... def version(self, code): return '1.21wmf8' // ... rest of the code ...
953660c5630b5d3bdf4567b68ba6e51929395ee5
tensorflow/core/platform/default/stream_executor_util.h
tensorflow/core/platform/default/stream_executor_util.h
q /* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ #define TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ // IWYU pragma: private, include "third_party/tensorflow/core/platform/stream_executor_util.h" // IWYU pragma: friend third_party/tensorflow/core/platform/stream_executor_util.h #include "tensorflow/stream_executor/lib/status.h" namespace tensorflow { namespace gpu = ::perftools::gputools; // On the open-source platform, stream_executor currently uses // tensorflow::Status inline Status FromStreamExecutorStatus( const perftools::gputools::port::Status& s) { return s; } } // namespace tensorflow #endif // TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ #define TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ // IWYU pragma: private, include "third_party/tensorflow/core/platform/stream_executor_util.h" // IWYU pragma: friend third_party/tensorflow/core/platform/stream_executor_util.h #include "tensorflow/stream_executor/lib/status.h" namespace tensorflow { namespace gpu = ::perftools::gputools; // On the open-source platform, stream_executor currently uses // tensorflow::Status inline Status FromStreamExecutorStatus( const perftools::gputools::port::Status& s) { return s; } } // namespace tensorflow #endif // TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_
Fix build breakage from a typo. Change: 111528530
Fix build breakage from a typo. Change: 111528530
C
apache-2.0
manazhao/tf_recsys,martinwicke/tensorflow,manjunaths/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Carmezim/tensorflow,Kongsea/tensorflow,petewarden/tensorflow_makefile,ppries/tensorflow,rabipanda/tensorflow,AnishShah/tensorflow,dendisuhubdy/tensorflow,arborh/tensorflow,Mistobaan/tensorflow,renyi533/tensorflow,XueqingLin/tensorflow,Bulochkin/tensorflow_pack,Xeralux/tensorflow,SnakeJenny/TensorFlow,aselle/tensorflow,shreyasva/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,pcm17/tensorflow,cancan101/tensorflow,benoitsteiner/tensorflow-opencl,davidzchen/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,peterbraden/tensorflow,ppwwyyxx/tensorflow,Moriadry/tensorflow,lukas-krecan/tensorflow,sjperkins/tensorflow,calebfoss/tensorflow,apark263/tensorflow,freedomtan/tensorflow,johndpope/tensorflow,snnn/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,RyanYoung25/tensorflow,HKUST-SING/tensorflow,dongjoon-hyun/tensorflow,mengxn/tensorflow,chris-chris/tensorflow,hehongliang/tensorflow,laszlocsomor/tensorflow,alisidd/tensorflow,pavelchristof/gomoku-ai,seanli9jan/tensorflow,jeffzheng1/tensorflow,tornadozou/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,ibab/tensorflow,av8ramit/tensorflow,Kongsea/tensorflow,pcm17/tensorflow,asimshankar/tensorflow,hsaputra/tensorflow,TakayukiSakai/tensorflow,ravindrapanda/tensorflow,nanditav/15712-TensorFlow,annarev/tensorflow,jbedorf/tensorflow,adamtiger/tensorflow,hfp/tensorflow-xsmm,hsaputra/tensorflow,seaotterman/tensorflow,yanchen036/tensorflow,laszlocsomor/tensorflow,kchodorow/tensorflow,johndpope/tensorflow,thjashin/tensorflow,tiagofrepereira2012/tensorflow,manjunaths/tensorflow,dongjoon-hyun/tensorflow,memo/tensorflow,wangyum/tensorflow,sandeepgupta2k4/tensorflow,tomasreimers/tensorflow-emscripten,DavidNorman/tensorflow,tiagofrepereira2012/tensorflow,manjunaths/tensorflow,dendisuhubdy/tensorflow,arborh/tensorflow,Mistobaan/tensorflow,renyi533/tensorflow,XueqingLin/tensorflow,Bulochkin/tensorflow_pack,Xeralux/tensorflow,SnakeJenny/TensorFlow,aselle/tensorflow,shreyasva/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,pcm17/tensorflow,cancan101/tensorflow,benoitsteiner/tensorflow-opencl,davidzchen/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,peterbraden/tensorflow,ppwwyyxx/tensorflow,Moriadry/tensorflow,lukas-krecan/tensorflow,sjperkins/tensorflow,calebfoss/tensorflow,apark263/tensorflow,freedomtan/tensorflow,johndpope/tensorflow,snnn/tensorflow,girving/tensorflow,JingJunYin/tensorflow,apark263/tensorflow,eerwitt/tensorflow,gautam1858/tensorflow,pierreg/tensorflow,tillahoffmann/tensorflow,JingJunYin/tensorflow,Bulochkin/tensorflow_pack,llhe/tensorflow,aam-at/tensorflow,MostafaGazar/tensorflow,pavelchristof/gomoku-ai,frreiss/tensorflow-fred,benoitsteiner/tensorflow-xsmm,JVillella/tensorflow,tntnatbry/tensorflow,chemelnucfin/tensorflow,tntnatbry/tensorflow,gunan/tensorflow,adit-chandra/tensorflow,aam-at/tensorflow,ibmsoe/tensorflow,whn09/tensorflow,XueqingLin/tensorflow,dongjoon-hyun/tensorflow,meteorcloudy/tensorflow,davidzchen/tensorflow,kobejean/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,Kongsea/tensorflow,eerwitt/tensorflow,chris-chris/tensorflow,aldian/tensorflow,sandeepdsouza93/TensorFlow-15712,freedomtan/tensorflow,maciekcc/tensorflow,eaplatanios/tensorflow,davidzchen/tensorflow,guschmue/tensorflow,code-sauce/tensorflow,alsrgv/tensorflow,alshedivat/tensorflow,ZhangXinNan/tensorflow,alivecor/tensorflow,MoamerEncsConcordiaCa/tensorflow,apark263/tensorflow,brchiu/tensorflow,suiyuan2009/tensorflow,eerwitt/tensorflow,admcrae/tensorflow,asimshankar/tensorflow,alshedivat/tensorflow,taknevski/tensorflow-xsmm,whn09/tensorflow,juharris/tensorflow,tongwang01/tensorflow,chemelnucfin/tensorflow,hfp/tensorflow-xsmm,gojira/tensorflow,HaebinShin/tensorflow,gnieboer/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,sjperkins/tensorflow,DavidNorman/tensorflow,Bulochkin/tensorflow_pack,codrut3/tensorflow,cancan101/tensorflow,mrry/tensorflow,thjashin/tensorflow,yongtang/tensorflow,ivano666/tensorflow,alheinecke/tensorflow-xsmm,hfp/tensorflow-xsmm,unsiloai/syntaxnet-ops-hack,ravindrapanda/tensorflow,elingg/tensorflow,codrut3/tensorflow,tillahoffmann/tensorflow,chris-chris/tensorflow,annarev/tensorflow,eaplatanios/tensorflow,alivecor/tensorflow,nburn42/tensorflow,bowang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,johndpope/tensorflow,alivecor/tensorflow,asadziach/tensorflow,sarvex/tensorflow,thjashin/tensorflow,JinXinDeep/tensorflow,MycChiu/tensorflow,Bulochkin/tensorflow_pack,hehongliang/tensorflow,hehongliang/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,suiyuan2009/tensorflow,alsrgv/tensorflow,odejesush/tensorflow,ppwwyyxx/tensorflow,alistairlow/tensorflow,LUTAN/tensorflow,tensorflow/tensorflow,ivano666/tensorflow,seanli9jan/tensorflow,kamcpp/tensorflow,alsrgv/tensorflow,eaplatanios/tensorflow,moonboots/tensorflow,gautam1858/tensorflow,ageron/tensorflow,ibmsoe/tensorflow,drpngx/tensorflow,mdrumond/tensorflow,xzturn/tensorflow,meteorcloudy/tensorflow,elingg/tensorflow,ZhangXinNan/tensorflow,Mistobaan/tensorflow,tomasreimers/tensorflow-emscripten,tornadozou/tensorflow,mortada/tensorflow,awni/tensorflow,ZhangXinNan/tensorflow,lukas-krecan/tensorflow,rabipanda/tensorflow,brchiu/tensorflow,nightjean/Deep-Learning,XueqingLin/tensorflow,alheinecke/tensorflow-xsmm,handroissuazo/tensorflow,dendisuhubdy/tensorflow,davidzchen/tensorflow,lukeiwanski/tensorflow,dendisuhubdy/tensorflow,adamtiger/tensorflow,karllessard/tensorflow,zasdfgbnm/tensorflow,mdrumond/tensorflow,ishay2b/tensorflow,ageron/tensorflow,cg31/tensorflow,JingJunYin/tensorflow,mdrumond/tensorflow,admcrae/tensorflow,markslwong/tensorflow,apark263/tensorflow,strint/tensorflow,adit-chandra/tensorflow,theflofly/tensorflow,jendap/tensorflow,aselle/tensorflow,sarvex/tensorflow,scenarios/tensorflow,allenlavoie/tensorflow,ppries/tensorflow,taknevski/tensorflow-xsmm,raymondxyang/tensorflow,hsaputra/tensorflow,hsaputra/tensorflow,TakayukiSakai/tensorflow,suiyuan2009/tensorflow,ibmsoe/tensorflow,jalexvig/tensorflow,yaroslavvb/tensorflow,mrry/tensorflow,llhe/tensorflow,jart/tensorflow,adit-chandra/tensorflow,theflofly/tensorflow,yongtang/tensorflow,dyoung418/tensorflow,seaotterman/tensorflow,admcrae/tensorflow,martinwicke/tensorflow,Intel-Corporation/tensorflow,ppwwyyxx/tensorflow,elingg/tensorflow,caisq/tensorflow,ychfan/tensorflow,ppries/tensorflow,benoitsteiner/tensorflow-opencl,nanditav/15712-TensorFlow,jhseu/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,cg31/tensorflow,tensorflow/tensorflow,shreyasva/tensorflow,wchan/tensorflow,Bismarrck/tensorflow,mavenlin/tensorflow,nightjean/Deep-Learning,JinXinDeep/tensorflow,jhaux/tensorflow,guschmue/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,rdipietro/tensorflow,ville-k/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,krikru/tensorflow-opencl,elingg/tensorflow,pavelchristof/gomoku-ai,ville-k/tensorflow,ageron/tensorflow,rabipanda/tensorflow,EvenStrangest/tensorflow,chris-chris/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,kchodorow/tensorflow,chemelnucfin/tensorflow,martinwicke/tensorflow,Moriadry/tensorflow,karllessard/tensorflow,yanchen036/tensorflow,alheinecke/tensorflow-xsmm,tensorflow/tensorflow-pywrap_saved_model,Carmezim/tensorflow,gojira/tensorflow,arborh/tensorflow,xodus7/tensorflow,naturali/tensorflow,yaroslavvb/tensorflow,ninotoshi/tensorflow,neilhan/tensorflow,manjunaths/tensorflow,codrut3/tensorflow,taknevski/tensorflow-xsmm,ibmsoe/tensorflow,scenarios/tensorflow,MostafaGazar/tensorflow,arborh/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,codrut3/tensorflow,DavidNorman/tensorflow,Mistobaan/tensorflow,ArtsiomCh/tensorflow,peterbraden/tensorflow,horance-liu/tensorflow,handroissuazo/tensorflow,apark263/tensorflow,lukeiwanski/tensorflow-opencl,EvenStrangest/tensorflow,drpngx/tensorflow,markslwong/tensorflow,4Quant/tensorflow,aselle/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,davidzchen/tensorflow,drpngx/tensorflow,ran5515/DeepDecision,tiagofrepereira2012/tensorflow,scenarios/tensorflow,eadgarchen/tensorflow,guschmue/tensorflow,tensorflow/tensorflow-pywrap_saved_model,lukeiwanski/tensorflow-opencl,meteorcloudy/tensorflow,pierreg/tensorflow,alsrgv/tensorflow,unsiloai/syntaxnet-ops-hack,TakayukiSakai/tensorflow,alsrgv/tensorflow,alisidd/tensorflow,eerwitt/tensorflow,sandeepgupta2k4/tensorflow,davidzchen/tensorflow,ravindrapanda/tensorflow,HKUST-SING/tensorflow,vrv/tensorflow,a-doumoulakis/tensorflow,petewarden/tensorflow_makefile,naturali/tensorflow,HKUST-SING/tensorflow,pcm17/tensorflow,anilmuthineni/tensorflow,ArtsiomCh/tensorflow,shreyasva/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,markslwong/tensorflow,benoitsteiner/tensorflow,abhitopia/tensorflow,benoitsteiner/tensorflow-xsmm,Intel-tensorflow/tensorflow,awni/tensorflow,aam-at/tensorflow,pcm17/tensorflow,eadgarchen/tensorflow,allenlavoie/tensorflow,yufengg/tensorflow,tornadozou/tensorflow,girving/tensorflow,tensorflow/tensorflow,yaroslavvb/tensorflow,girving/tensorflow,JingJunYin/tensorflow,guschmue/tensorflow,dancingdan/tensorflow,pcm17/tensorflow,frreiss/tensorflow-fred,hlt-mt/tensorflow,neilhan/tensorflow,kevin-coder/tensorflow-fork,peterbraden/tensorflow,apark263/tensorflow,martinwicke/tensorflow,lakshayg/tensorflow,jostep/tensorflow,llhe/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow,haeusser/tensorflow,anilmuthineni/tensorflow,asimshankar/tensorflow,lukeiwanski/tensorflow-opencl,asadziach/tensorflow,ninotoshi/tensorflow,naturali/tensorflow,alisidd/tensorflow,asimshankar/tensorflow,AnishShah/tensorflow,pavelchristof/gomoku-ai,tensorflow/tensorflow,lukeiwanski/tensorflow-opencl,kobejean/tensorflow,theflofly/tensorflow,panmari/tensorflow,RyanYoung25/tensorflow,mixturemodel-flow/tensorflow,allenlavoie/tensorflow,dancingdan/tensorflow,benoitsteiner/tensorflow,anilmuthineni/tensorflow,mortada/tensorflow,MycChiu/tensorflow,DCSaunders/tensorflow,aselle/tensorflow,bowang/tensorflow,adit-chandra/tensorflow,LUTAN/tensorflow,meteorcloudy/tensorflow,wangyum/tensorflow,HKUST-SING/tensorflow,manjunaths/tensorflow,odejesush/tensorflow,shreyasva/tensorflow,seanli9jan/tensorflow,martinwicke/tensorflow,chemelnucfin/tensorflow,frreiss/tensorflow-fred,girving/tensorflow,yaroslavvb/tensorflow,eerwitt/tensorflow,aldian/tensorflow,kobejean/tensorflow,snnn/tensorflow,jalexvig/tensorflow,hfp/tensorflow-xsmm,vrv/tensorflow,gibiansky/tensorflow,sandeepgupta2k4/tensorflow,ran5515/DeepDecision,sandeepgupta2k4/tensorflow,cg31/tensorflow,mengxn/tensorflow,mixturemodel-flow/tensorflow,Intel-tensorflow/tensorflow,lukeiwanski/tensorflow,ibab/tensorflow,johndpope/tensorflow,MycChiu/tensorflow,Moriadry/tensorflow,ghchinoy/tensorflow,girving/tensorflow,jhseu/tensorflow,jbedorf/tensorflow,jendap/tensorflow,panmari/tensorflow,nolanliou/tensorflow,bowang/tensorflow,LUTAN/tensorflow,yaroslavvb/tensorflow,caisq/tensorflow,MoamerEncsConcordiaCa/tensorflow,seanli9jan/tensorflow,karllessard/tensorflow,xzturn/tensorflow,thjashin/tensorflow,drpngx/tensorflow,elingg/tensorflow,pcm17/tensorflow,tiagofrepereira2012/tensorflow,tomasreimers/tensorflow-emscripten,Mistobaan/tensorflow,ravindrapanda/tensorflow,xodus7/tensorflow,cxxgtxy/tensorflow,krikru/tensorflow-opencl,4Quant/tensorflow,haeusser/tensorflow,zasdfgbnm/tensorflow,guschmue/tensorflow,Xeralux/tensorflow,cxxgtxy/tensorflow,kobejean/tensorflow,tiagofrepereira2012/tensorflow,nburn42/tensorflow,alheinecke/tensorflow-xsmm,MycChiu/tensorflow,mavenlin/tensorflow,juharris/tensorflow,lukeiwanski/tensorflow-opencl,paolodedios/tensorflow,nolanliou/tensorflow,freedomtan/tensorflow,rdipietro/tensorflow,aam-at/tensorflow,codrut3/tensorflow,jeffzheng1/tensorflow,jostep/tensorflow,handroissuazo/tensorflow,mixturemodel-flow/tensorflow,petewarden/tensorflow_makefile,mixturemodel-flow/tensorflow,seaotterman/tensorflow,ppwwyyxx/tensorflow,JinXinDeep/tensorflow,yongtang/tensorflow,JVillella/tensorflow,guschmue/tensorflow,andrewcmyers/tensorflow,tornadozou/tensorflow,kchodorow/tensorflow,manazhao/tf_recsys,zasdfgbnm/tensorflow,karllessard/tensorflow,nanditav/15712-TensorFlow,allenlavoie/tensorflow,nikste/tensorflow,dendisuhubdy/tensorflow,meteorcloudy/tensorflow,gunan/tensorflow,jbedorf/tensorflow,nikste/tensorflow,hsaputra/tensorflow,ZhangXinNan/tensorflow,jalexvig/tensorflow,MycChiu/tensorflow,laosiaudi/tensorflow,eaplatanios/tensorflow,ppries/tensorflow,Xeralux/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,petewarden/tensorflow,Bulochkin/tensorflow_pack,gnieboer/tensorflow,HKUST-SING/tensorflow,tornadozou/tensorflow,jalexvig/tensorflow,renyi533/tensorflow,jhaux/tensorflow,yufengg/tensorflow,EvenStrangest/tensorflow,seanli9jan/tensorflow,ravindrapanda/tensorflow,ninotoshi/tensorflow,seaotterman/tensorflow,ychfan/tensorflow,RyanYoung25/tensorflow,handroissuazo/tensorflow,ppwwyyxx/tensorflow,frreiss/tensorflow-fred,XueqingLin/tensorflow,Bulochkin/tensorflow_pack,cancan101/tensorflow,jalexvig/tensorflow,kevin-coder/tensorflow-fork,dyoung418/tensorflow,aam-at/tensorflow,renyi533/tensorflow,HaebinShin/tensorflow,jalexvig/tensorflow,tiagofrepereira2012/tensorflow,HaebinShin/tensorflow,allenlavoie/tensorflow,wangyum/tensorflow,pierreg/tensorflow,awni/tensorflow,yaroslavvb/tensorflow,brchiu/tensorflow,dancingdan/tensorflow,tomasreimers/tensorflow-emscripten,haeusser/tensorflow,chemelnucfin/tensorflow,jhaux/tensorflow,meteorcloudy/tensorflow,admcrae/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhaux/tensorflow,ageron/tensorflow,mixturemodel-flow/tensorflow,cg31/tensorflow,caisq/tensorflow,Carmezim/tensorflow,gunan/tensorflow,odejesush/tensorflow,mengxn/tensorflow,dhalleine/tensorflow,pavelchristof/gomoku-ai,nolanliou/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,panmari/tensorflow,seanli9jan/tensorflow,arborh/tensorflow,renyi533/tensorflow,thjashin/tensorflow,aselle/tensorflow,elingg/tensorflow,guschmue/tensorflow,yanchen036/tensorflow,pierreg/tensorflow,kobejean/tensorflow,unsiloai/syntaxnet-ops-hack,yufengg/tensorflow,kobejean/tensorflow,paolodedios/tensorflow,admcrae/tensorflow,AndreasMadsen/tensorflow,eadgarchen/tensorflow,codrut3/tensorflow,lukeiwanski/tensorflow,MostafaGazar/tensorflow,maciekcc/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,whn09/tensorflow,krikru/tensorflow-opencl,paolodedios/tensorflow,guschmue/tensorflow,wangyum/tensorflow,unsiloai/syntaxnet-ops-hack,jeffzheng1/tensorflow,chris-chris/tensorflow,jwlawson/tensorflow,DavidNorman/tensorflow,strint/tensorflow,DCSaunders/tensorflow,manipopopo/tensorflow,wchan/tensorflow,hlt-mt/tensorflow,markslwong/tensorflow,jhaux/tensorflow,gibiansky/tensorflow,thesuperzapper/tensorflow,HKUST-SING/tensorflow,LUTAN/tensorflow,kchodorow/tensorflow,ychfan/tensorflow,Mazecreator/tensorflow,nightjean/Deep-Learning,sandeepgupta2k4/tensorflow,Mazecreator/tensorflow,seanli9jan/tensorflow,Bulochkin/tensorflow_pack,benoitsteiner/tensorflow,jostep/tensorflow,johndpope/tensorflow,sandeepgupta2k4/tensorflow,chenjun0210/tensorflow,taknevski/tensorflow-xsmm,adit-chandra/tensorflow,girving/tensorflow,tntnatbry/tensorflow,martinwicke/tensorflow,lukeiwanski/tensorflow,gojira/tensorflow,pcm17/tensorflow,alshedivat/tensorflow,jbedorf/tensorflow,lukeiwanski/tensorflow-opencl,wangyum/tensorflow,petewarden/tensorflow_makefile,yanchen036/tensorflow,snnn/tensorflow,Moriadry/tensorflow,AnishShah/tensorflow,jart/tensorflow,Bismarrck/tensorflow,awni/tensorflow,aldian/tensorflow,zasdfgbnm/tensorflow,snnn/tensorflow,benoitsteiner/tensorflow-xsmm,sjperkins/tensorflow,alshedivat/tensorflow,codrut3/tensorflow,alshedivat/tensorflow,ishay2b/tensorflow,EvenStrangest/tensorflow,odejesush/tensorflow,DCSaunders/tensorflow,RyanYoung25/tensorflow,ninotoshi/tensorflow,panmari/tensorflow,dyoung418/tensorflow,Intel-Corporation/tensorflow,Kongsea/tensorflow,gnieboer/tensorflow,AndreasMadsen/tensorflow,kevin-coder/tensorflow-fork,kamcpp/tensorflow,annarev/tensorflow,aselle/tensorflow,gautam1858/tensorflow,Bismarrck/tensorflow,krikru/tensorflow-opencl,MostafaGazar/tensorflow,dongjoon-hyun/tensorflow,ppries/tensorflow,ychfan/tensorflow,jeffzheng1/tensorflow,calebfoss/tensorflow,hfp/tensorflow-xsmm,yufengg/tensorflow,andrewcmyers/tensorflow,gibiansky/tensorflow,MoamerEncsConcordiaCa/tensorflow,anand-c-goog/tensorflow,HaebinShin/tensorflow,rdipietro/tensorflow,chenjun0210/tensorflow,cxxgtxy/tensorflow,gnieboer/tensorflow,neilhan/tensorflow,panmari/tensorflow,nikste/tensorflow,lukeiwanski/tensorflow,mixturemodel-flow/tensorflow,tiagofrepereira2012/tensorflow,aselle/tensorflow,nburn42/tensorflow,ghchinoy/tensorflow,allenlavoie/tensorflow,codrut3/tensorflow,xodus7/tensorflow,anilmuthineni/tensorflow,asimshankar/tensorflow,dyoung418/tensorflow,mortada/tensorflow,code-sauce/tensorflow,meteorcloudy/tensorflow,asimshankar/tensorflow,frreiss/tensorflow-fred,nolanliou/tensorflow,code-sauce/tensorflow,raymondxyang/tensorflow,nolanliou/tensorflow,MostafaGazar/tensorflow,gibiansky/tensorflow,hlt-mt/tensorflow,ArtsiomCh/tensorflow,pierreg/tensorflow,JinXinDeep/tensorflow,code-sauce/tensorflow,unsiloai/syntaxnet-ops-hack,nanditav/15712-TensorFlow,davidzchen/tensorflow,nightjean/Deep-Learning,horance-liu/tensorflow,MostafaGazar/tensorflow,haeusser/tensorflow,arborh/tensorflow,rdipietro/tensorflow,shreyasva/tensorflow,vrv/tensorflow,Intel-Corporation/tensorflow,memo/tensorflow,Mistobaan/tensorflow,adit-chandra/tensorflow,mavenlin/tensorflow,lakshayg/tensorflow,MoamerEncsConcordiaCa/tensorflow,markslwong/tensorflow,jeffzheng1/tensorflow,ghchinoy/tensorflow,mortada/tensorflow,juharris/tensorflow,mrry/tensorflow,eaplatanios/tensorflow,Intel-tensorflow/tensorflow,hehongliang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,code-sauce/tensorflow,benoitsteiner/tensorflow,martinbede/second-sight,dyoung418/tensorflow,sandeepdsouza93/TensorFlow-15712,pavelchristof/gomoku-ai,frreiss/tensorflow-fred,aldian/tensorflow,ghchinoy/tensorflow,bowang/tensorflow,laosiaudi/tensorflow,wchan/tensorflow,caisq/tensorflow,TakayukiSakai/tensorflow,cancan101/tensorflow,mengxn/tensorflow,tensorflow/tensorflow,mdrumond/tensorflow,xzturn/tensorflow,xzturn/tensorflow,juharris/tensorflow,jwlawson/tensorflow,AndreasMadsen/tensorflow,hsaputra/tensorflow,aselle/tensorflow,tongwang01/tensorflow,SnakeJenny/TensorFlow,petewarden/tensorflow,rabipanda/tensorflow,calebfoss/tensorflow,chemelnucfin/tensorflow,snnn/tensorflow,eerwitt/tensorflow,gnieboer/tensorflow,martinwicke/tensorflow,xzturn/tensorflow,chenjun0210/tensorflow,dendisuhubdy/tensorflow,panmari/tensorflow,Mazecreator/tensorflow,brchiu/tensorflow,renyi533/tensorflow,mortada/tensorflow,adit-chandra/tensorflow,gibiansky/tensorflow,gautam1858/tensorflow,nikste/tensorflow,drpngx/tensorflow,XueqingLin/tensorflow,asimshankar/tensorflow,dongjoon-hyun/tensorflow,gibiansky/tensorflow,tiagofrepereira2012/tensorflow,meteorcloudy/tensorflow,EvenStrangest/tensorflow,sjperkins/tensorflow,nanditav/15712-TensorFlow,jwlawson/tensorflow,nolanliou/tensorflow,thesuperzapper/tensorflow,adamtiger/tensorflow,naturali/tensorflow,gunan/tensorflow,renyi533/tensorflow,rdipietro/tensorflow,nburn42/tensorflow,Intel-Corporation/tensorflow,sjperkins/tensorflow,vrv/tensorflow,arborh/tensorflow,sjperkins/tensorflow,nikste/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,zycdragonball/tensorflow,whn09/tensorflow,martinwicke/tensorflow,tntnatbry/tensorflow,panmari/tensorflow,wchan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,ghchinoy/tensorflow,sandeepdsouza93/TensorFlow-15712,jart/tensorflow,alistairlow/tensorflow,handroissuazo/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,jwlawson/tensorflow,xodus7/tensorflow,eaplatanios/tensorflow,jeffzheng1/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,haeusser/tensorflow,JinXinDeep/tensorflow,Kongsea/tensorflow,mengxn/tensorflow,jalexvig/tensorflow,krikru/tensorflow-opencl,awni/tensorflow,ville-k/tensorflow,suiyuan2009/tensorflow,scenarios/tensorflow,Xeralux/tensorflow,mavenlin/tensorflow,juharris/tensorflow,nightjean/Deep-Learning,martinbede/second-sight,jendap/tensorflow,sarvex/tensorflow,martinbede/second-sight,jhseu/tensorflow,av8ramit/tensorflow,peterbraden/tensorflow,karllessard/tensorflow,arborh/tensorflow,yongtang/tensorflow,moonboots/tensorflow,mengxn/tensorflow,kevin-coder/tensorflow-fork,jhaux/tensorflow,HKUST-SING/tensorflow,MoamerEncsConcordiaCa/tensorflow,martinwicke/tensorflow,RapidApplicationDevelopment/tensorflow,jendap/tensorflow,mixturemodel-flow/tensorflow,seanli9jan/tensorflow,ninotoshi/tensorflow,ravindrapanda/tensorflow,dancingdan/tensorflow,yaroslavvb/tensorflow,jwlawson/tensorflow,ArtsiomCh/tensorflow,benoitsteiner/tensorflow-xsmm,codrut3/tensorflow,alheinecke/tensorflow-xsmm,ageron/tensorflow,anilmuthineni/tensorflow,ZhangXinNan/tensorflow,chemelnucfin/tensorflow,jendap/tensorflow,maciekcc/tensorflow,ivano666/tensorflow,handroissuazo/tensorflow,nikste/tensorflow,av8ramit/tensorflow,jalexvig/tensorflow,alisidd/tensorflow,sjperkins/tensorflow,ychfan/tensorflow,adamtiger/tensorflow,sjperkins/tensorflow,pavelchristof/gomoku-ai,rdipietro/tensorflow,DCSaunders/tensorflow,llhe/tensorflow,wangyum/tensorflow,bowang/tensorflow,jendap/tensorflow,dyoung418/tensorflow,dancingdan/tensorflow,ibmsoe/tensorflow,horance-liu/tensorflow,mrry/tensorflow,jalexvig/tensorflow,neilhan/tensorflow,jostep/tensorflow,jart/tensorflow,zasdfgbnm/tensorflow,eadgarchen/tensorflow,manazhao/tf_recsys,tornadozou/tensorflow,ran5515/DeepDecision,hehongliang/tensorflow,ArtsiomCh/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,nanditav/15712-TensorFlow,gojira/tensorflow,elingg/tensorflow,DavidNorman/tensorflow,maciekcc/tensorflow,DCSaunders/tensorflow,ppwwyyxx/tensorflow,ravindrapanda/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,MycChiu/tensorflow,mdrumond/tensorflow,annarev/tensorflow,lukeiwanski/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ibab/tensorflow,eerwitt/tensorflow,gautam1858/tensorflow,XueqingLin/tensorflow,gunan/tensorflow,jhseu/tensorflow,alistairlow/tensorflow,aldian/tensorflow,caisq/tensorflow,kobejean/tensorflow,andrewcmyers/tensorflow,Moriadry/tensorflow,scenarios/tensorflow,mdrumond/tensorflow,seaotterman/tensorflow,jwlawson/tensorflow,hfp/tensorflow-xsmm,shreyasva/tensorflow,jbedorf/tensorflow,mrry/tensorflow,alistairlow/tensorflow,peterbraden/tensorflow,thesuperzapper/tensorflow,Moriadry/tensorflow,jendap/tensorflow,sandeepgupta2k4/tensorflow,juharris/tensorflow,jeffzheng1/tensorflow,HaebinShin/tensorflow,yufengg/tensorflow,ivano666/tensorflow,alisidd/tensorflow,wangyum/tensorflow,Mazecreator/tensorflow,shreyasva/tensorflow,yaroslavvb/tensorflow,ghchinoy/tensorflow,pcm17/tensorflow,Xeralux/tensorflow,ibmsoe/tensorflow,code-sauce/tensorflow,allenlavoie/tensorflow,abhitopia/tensorflow,nikste/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,alsrgv/tensorflow,tillahoffmann/tensorflow,theflofly/tensorflow,anilmuthineni/tensorflow,cg31/tensorflow,codrut3/tensorflow,gibiansky/tensorflow,kevin-coder/tensorflow-fork,abhitopia/tensorflow,lakshayg/tensorflow,benoitsteiner/tensorflow-opencl,maciekcc/tensorflow,petewarden/tensorflow_makefile,TakayukiSakai/tensorflow,tomasreimers/tensorflow-emscripten,xzturn/tensorflow,girving/tensorflow,scenarios/tensorflow,alshedivat/tensorflow,gunan/tensorflow,calebfoss/tensorflow,horance-liu/tensorflow,strint/tensorflow,cxxgtxy/tensorflow,xzturn/tensorflow,petewarden/tensorflow,with-git/tensorflow,eadgarchen/tensorflow,snnn/tensorflow,tntnatbry/tensorflow,JVillella/tensorflow,jhseu/tensorflow,rabipanda/tensorflow,llhe/tensorflow,pierreg/tensorflow,caisq/tensorflow,nolanliou/tensorflow,adit-chandra/tensorflow,laszlocsomor/tensorflow,seanli9jan/tensorflow,adit-chandra/tensorflow,thesuperzapper/tensorflow,markslwong/tensorflow,vrv/tensorflow,strint/tensorflow,Bulochkin/tensorflow_pack,MycChiu/tensorflow,kchodorow/tensorflow,Mazecreator/tensorflow,dancingdan/tensorflow,Intel-tensorflow/tensorflow,nolanliou/tensorflow,jwlawson/tensorflow,gojira/tensorflow,tntnatbry/tensorflow,drpngx/tensorflow,LUTAN/tensorflow,mdrumond/tensorflow,EvenStrangest/tensorflow,RapidApplicationDevelopment/tensorflow,eaplatanios/tensorflow,petewarden/tensorflow_makefile,lukas-krecan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,manipopopo/tensorflow,neilhan/tensorflow,nanditav/15712-TensorFlow,a-doumoulakis/tensorflow,adamtiger/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,dhalleine/tensorflow,whn09/tensorflow,with-git/tensorflow,tensorflow/tensorflow-pywrap_saved_model,manjunaths/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,horance-liu/tensorflow,Bismarrck/tensorflow,LUTAN/tensorflow,calebfoss/tensorflow,Carmezim/tensorflow,LUTAN/tensorflow,lukeiwanski/tensorflow-opencl,whn09/tensorflow,lukas-krecan/tensorflow,jhaux/tensorflow,Carmezim/tensorflow,cancan101/tensorflow,allenlavoie/tensorflow,gibiansky/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,jhseu/tensorflow,bowang/tensorflow,RyanYoung25/tensorflow,nikste/tensorflow,johndpope/tensorflow,andrewcmyers/tensorflow,cxxgtxy/tensorflow,nburn42/tensorflow,girving/tensorflow,odejesush/tensorflow,calebfoss/tensorflow,cancan101/tensorflow,Intel-tensorflow/tensorflow,anand-c-goog/tensorflow,brchiu/tensorflow,eaplatanios/tensorflow,chris-chris/tensorflow,abhitopia/tensorflow,ppries/tensorflow,alistairlow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,av8ramit/tensorflow,EvenStrangest/tensorflow,rdipietro/tensorflow,chemelnucfin/tensorflow,RyanYoung25/tensorflow,hehongliang/tensorflow,ville-k/tensorflow,jhaux/tensorflow,calebfoss/tensorflow,paolodedios/tensorflow,thesuperzapper/tensorflow,alshedivat/tensorflow,seaotterman/tensorflow,freedomtan/tensorflow,cg31/tensorflow,alheinecke/tensorflow-xsmm,nightjean/Deep-Learning,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,av8ramit/tensorflow,petewarden/tensorflow,mortada/tensorflow,aldian/tensorflow,yanchen036/tensorflow,calebfoss/tensorflow,rabipanda/tensorflow,renyi533/tensorflow,neilhan/tensorflow,JingJunYin/tensorflow,maciekcc/tensorflow,tillahoffmann/tensorflow,paolodedios/tensorflow,wchan/tensorflow,asadziach/tensorflow,code-sauce/tensorflow,LUTAN/tensorflow,xzturn/tensorflow,tongwang01/tensorflow,SnakeJenny/TensorFlow,Bulochkin/tensorflow_pack,manjunaths/tensorflow,rabipanda/tensorflow,ghchinoy/tensorflow,anilmuthineni/tensorflow,xzturn/tensorflow,suiyuan2009/tensorflow,manipopopo/tensorflow,girving/tensorflow,lakshayg/tensorflow,petewarden/tensorflow,dendisuhubdy/tensorflow,memo/tensorflow,aldian/tensorflow,eadgarchen/tensorflow,tongwang01/tensorflow,gautam1858/tensorflow,jwlawson/tensorflow,JinXinDeep/tensorflow,Carmezim/tensorflow,DavidNorman/tensorflow,abhitopia/tensorflow,apark263/tensorflow,ibmsoe/tensorflow,haeusser/tensorflow,AndreasMadsen/tensorflow,dongjoon-hyun/tensorflow,Intel-Corporation/tensorflow,Bismarrck/tensorflow,asadziach/tensorflow,cxxgtxy/tensorflow,apark263/tensorflow,naturali/tensorflow,brchiu/tensorflow,SnakeJenny/TensorFlow,ville-k/tensorflow,tongwang01/tensorflow,a-doumoulakis/tensorflow,Bulochkin/tensorflow_pack,Mistobaan/tensorflow,AnishShah/tensorflow,nightjean/Deep-Learning,rdipietro/tensorflow,zasdfgbnm/tensorflow,manjunaths/tensorflow,jwlawson/tensorflow,nburn42/tensorflow,sandeepdsouza93/TensorFlow-15712,Kongsea/tensorflow,davidzchen/tensorflow,dancingdan/tensorflow,chenjun0210/tensorflow,girving/tensorflow,strint/tensorflow,cancan101/tensorflow,zasdfgbnm/tensorflow,aselle/tensorflow,sarvex/tensorflow,awni/tensorflow,SnakeJenny/TensorFlow,sandeepdsouza93/TensorFlow-15712,ppwwyyxx/tensorflow,chenjun0210/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,kchodorow/tensorflow,ppries/tensorflow,JingJunYin/tensorflow,rabipanda/tensorflow,mavenlin/tensorflow,RapidApplicationDevelopment/tensorflow,andrewcmyers/tensorflow,jart/tensorflow,maciekcc/tensorflow,martinbede/second-sight,gunan/tensorflow,ishay2b/tensorflow,JVillella/tensorflow,ageron/tensorflow,admcrae/tensorflow,naturali/tensorflow,horance-liu/tensorflow,apark263/tensorflow,benoitsteiner/tensorflow-opencl,tongwang01/tensorflow,theflofly/tensorflow,4Quant/tensorflow,tensorflow/tensorflow,alsrgv/tensorflow,DCSaunders/tensorflow,tomasreimers/tensorflow-emscripten,hsaputra/tensorflow,rdipietro/tensorflow,anand-c-goog/tensorflow,manipopopo/tensorflow,theflofly/tensorflow,dongjoon-hyun/tensorflow,benoitsteiner/tensorflow-opencl,jhseu/tensorflow,mavenlin/tensorflow,chris-chris/tensorflow,taknevski/tensorflow-xsmm,eerwitt/tensorflow,AnishShah/tensorflow,mrry/tensorflow,martinbede/second-sight,apark263/tensorflow,memo/tensorflow,Xeralux/tensorflow,peterbraden/tensorflow,wchan/tensorflow,dancingdan/tensorflow,Intel-tensorflow/tensorflow,anilmuthineni/tensorflow,lakshayg/tensorflow,moonboots/tensorflow,cg31/tensorflow,lukeiwanski/tensorflow-opencl,with-git/tensorflow,nburn42/tensorflow,admcrae/tensorflow,andrewcmyers/tensorflow,laszlocsomor/tensorflow,DCSaunders/tensorflow,raymondxyang/tensorflow,jeffzheng1/tensorflow,markslwong/tensorflow,Intel-Corporation/tensorflow,snnn/tensorflow,calebfoss/tensorflow,maciekcc/tensorflow,alheinecke/tensorflow-xsmm,rabipanda/tensorflow,kamcpp/tensorflow,apark263/tensorflow,Carmezim/tensorflow,tillahoffmann/tensorflow,karllessard/tensorflow,asadziach/tensorflow,ghchinoy/tensorflow,hlt-mt/tensorflow,brchiu/tensorflow,frreiss/tensorflow-fred,manazhao/tf_recsys,abhitopia/tensorflow,alisidd/tensorflow,memo/tensorflow,sarvex/tensorflow,krikru/tensorflow-opencl,caisq/tensorflow,unsiloai/syntaxnet-ops-hack,AnishShah/tensorflow,benoitsteiner/tensorflow-xsmm,juharris/tensorflow,benoitsteiner/tensorflow-xsmm,petewarden/tensorflow,theflofly/tensorflow,Moriadry/tensorflow,mengxn/tensorflow,peterbraden/tensorflow,DavidNorman/tensorflow,paolodedios/tensorflow,renyi533/tensorflow,annarev/tensorflow,awni/tensorflow,JinXinDeep/tensorflow,gunan/tensorflow,laosiaudi/tensorflow,alshedivat/tensorflow,ishay2b/tensorflow,jalexvig/tensorflow,benoitsteiner/tensorflow-xsmm,karllessard/tensorflow,Intel-Corporation/tensorflow,RapidApplicationDevelopment/tensorflow,snnn/tensorflow,ychfan/tensorflow,aam-at/tensorflow,a-doumoulakis/tensorflow,a-doumoulakis/tensorflow,av8ramit/tensorflow,naturali/tensorflow,wangyum/tensorflow,handroissuazo/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ArtsiomCh/tensorflow,aselle/tensorflow,awni/tensorflow,gnieboer/tensorflow,tomasreimers/tensorflow-emscripten,panmari/tensorflow,xodus7/tensorflow,Xeralux/tensorflow,lukas-krecan/tensorflow,ibmsoe/tensorflow,naturali/tensorflow,chenjun0210/tensorflow,memo/tensorflow,alivecor/tensorflow,tongwang01/tensorflow,eerwitt/tensorflow,RapidApplicationDevelopment/tensorflow,HKUST-SING/tensorflow,alsrgv/tensorflow,jendap/tensorflow,theflofly/tensorflow,benoitsteiner/tensorflow,johndpope/tensorflow,tensorflow/tensorflow,hlt-mt/tensorflow,haeusser/tensorflow,ibab/tensorflow,girving/tensorflow,alistairlow/tensorflow,alisidd/tensorflow,jart/tensorflow,SnakeJenny/TensorFlow,ychfan/tensorflow,ran5515/DeepDecision,benoitsteiner/tensorflow-opencl,dancingdan/tensorflow,a-doumoulakis/tensorflow,johndpope/tensorflow,with-git/tensorflow,sandeepdsouza93/TensorFlow-15712,manipopopo/tensorflow,Mistobaan/tensorflow,rabipanda/tensorflow,kamcpp/tensorflow,with-git/tensorflow,hehongliang/tensorflow,benoitsteiner/tensorflow-opencl,ran5515/DeepDecision,kevin-coder/tensorflow-fork,whn09/tensorflow,freedomtan/tensorflow,jbedorf/tensorflow,zycdragonball/tensorflow,dhalleine/tensorflow,drpngx/tensorflow,neilhan/tensorflow,nanditav/15712-TensorFlow,ibab/tensorflow,alsrgv/tensorflow,asadziach/tensorflow,cg31/tensorflow,Xeralux/tensorflow,dongjoon-hyun/tensorflow,frreiss/tensorflow-fred,dendisuhubdy/tensorflow,hlt-mt/tensorflow,mortada/tensorflow,ppwwyyxx/tensorflow,mavenlin/tensorflow,zasdfgbnm/tensorflow,seanli9jan/tensorflow,zycdragonball/tensorflow,LUTAN/tensorflow,kchodorow/tensorflow,Intel-Corporation/tensorflow,andrewcmyers/tensorflow,karllessard/tensorflow,brchiu/tensorflow,chenjun0210/tensorflow,ghchinoy/tensorflow,jart/tensorflow,alivecor/tensorflow,seaotterman/tensorflow,horance-liu/tensorflow,ArtsiomCh/tensorflow,arborh/tensorflow,annarev/tensorflow,Mazecreator/tensorflow,dongjoon-hyun/tensorflow,SnakeJenny/TensorFlow,JingJunYin/tensorflow,MycChiu/tensorflow,dhalleine/tensorflow,taknevski/tensorflow-xsmm,jart/tensorflow,frreiss/tensorflow-fred,lukeiwanski/tensorflow-opencl,AndreasMadsen/tensorflow,benoitsteiner/tensorflow,kamcpp/tensorflow,yanchen036/tensorflow,manipopopo/tensorflow,alshedivat/tensorflow,meteorcloudy/tensorflow,seaotterman/tensorflow,renyi533/tensorflow,allenlavoie/tensorflow,aam-at/tensorflow,snnn/tensorflow,kchodorow/tensorflow,pierreg/tensorflow,code-sauce/tensorflow,yongtang/tensorflow,yongtang/tensorflow,allenlavoie/tensorflow,llhe/tensorflow,cancan101/tensorflow,unsiloai/syntaxnet-ops-hack,jbedorf/tensorflow,laosiaudi/tensorflow,odejesush/tensorflow,thjashin/tensorflow,mortada/tensorflow,gunan/tensorflow,yufengg/tensorflow,cg31/tensorflow,lakshayg/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,petewarden/tensorflow,dongjoon-hyun/tensorflow,whn09/tensorflow,DCSaunders/tensorflow,aam-at/tensorflow,JinXinDeep/tensorflow,zasdfgbnm/tensorflow,tntnatbry/tensorflow,alistairlow/tensorflow,Bulochkin/tensorflow_pack,TakayukiSakai/tensorflow,laszlocsomor/tensorflow,caisq/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,llhe/tensorflow,eaplatanios/tensorflow,brchiu/tensorflow,mixturemodel-flow/tensorflow,ZhangXinNan/tensorflow,MostafaGazar/tensorflow,JVillella/tensorflow,Bismarrck/tensorflow,asadziach/tensorflow,theflofly/tensorflow,RapidApplicationDevelopment/tensorflow,ivano666/tensorflow,MostafaGazar/tensorflow,av8ramit/tensorflow,alistairlow/tensorflow,xodus7/tensorflow,wchan/tensorflow,eadgarchen/tensorflow,MostafaGazar/tensorflow,ivano666/tensorflow,taknevski/tensorflow-xsmm,taknevski/tensorflow-xsmm,benoitsteiner/tensorflow,aam-at/tensorflow,tornadozou/tensorflow,laosiaudi/tensorflow,ageron/tensorflow,thesuperzapper/tensorflow,RapidApplicationDevelopment/tensorflow,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow,nightjean/Deep-Learning,av8ramit/tensorflow,lakshayg/tensorflow,HKUST-SING/tensorflow,ZhangXinNan/tensorflow,DCSaunders/tensorflow,lukas-krecan/tensorflow,manjunaths/tensorflow,MoamerEncsConcordiaCa/tensorflow,jhseu/tensorflow,anand-c-goog/tensorflow,jostep/tensorflow,vrv/tensorflow,thjashin/tensorflow,tornadozou/tensorflow,moonboots/tensorflow,ZhangXinNan/tensorflow,moonboots/tensorflow,lukas-krecan/tensorflow,aldian/tensorflow,Bismarrck/tensorflow,DavidNorman/tensorflow,jwlawson/tensorflow,av8ramit/tensorflow,bowang/tensorflow,abhitopia/tensorflow,RapidApplicationDevelopment/tensorflow,gunan/tensorflow,neilhan/tensorflow,XueqingLin/tensorflow,jendap/tensorflow,MoamerEncsConcordiaCa/tensorflow,4Quant/tensorflow,kobejean/tensorflow,neilhan/tensorflow,AnishShah/tensorflow,gnieboer/tensorflow,admcrae/tensorflow,snnn/tensorflow,gnieboer/tensorflow,dhalleine/tensorflow,Bulochkin/tensorflow_pack,dendisuhubdy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,DCSaunders/tensorflow,JVillella/tensorflow,alisidd/tensorflow,anand-c-goog/tensorflow,gojira/tensorflow,JingJunYin/tensorflow,theflofly/tensorflow,gnieboer/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ibab/tensorflow,mengxn/tensorflow,gautam1858/tensorflow,HaebinShin/tensorflow,suiyuan2009/tensorflow,with-git/tensorflow,aam-at/tensorflow,yanchen036/tensorflow,yanchen036/tensorflow,jendap/tensorflow,Carmezim/tensorflow,mrry/tensorflow,thesuperzapper/tensorflow,hfp/tensorflow-xsmm,ppries/tensorflow,strint/tensorflow,JVillella/tensorflow,benoitsteiner/tensorflow,haeusser/tensorflow,Intel-tensorflow/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,dendisuhubdy/tensorflow,abhitopia/tensorflow,peterbraden/tensorflow,av8ramit/tensorflow,yongtang/tensorflow,zycdragonball/tensorflow,llhe/tensorflow,thesuperzapper/tensorflow,ivano666/tensorflow,renyi533/tensorflow,chris-chris/tensorflow,sarvex/tensorflow,alisidd/tensorflow,JingJunYin/tensorflow,paolodedios/tensorflow,xodus7/tensorflow,nanditav/15712-TensorFlow,kamcpp/tensorflow,ibmsoe/tensorflow,AnishShah/tensorflow,ghchinoy/tensorflow,with-git/tensorflow,gojira/tensorflow,xodus7/tensorflow,caisq/tensorflow,zycdragonball/tensorflow,benoitsteiner/tensorflow-xsmm,HaebinShin/tensorflow,nolanliou/tensorflow,laosiaudi/tensorflow,eaplatanios/tensorflow,memo/tensorflow,odejesush/tensorflow,JingJunYin/tensorflow,freedomtan/tensorflow,4Quant/tensorflow,sandeepgupta2k4/tensorflow,Mazecreator/tensorflow,ageron/tensorflow,asimshankar/tensorflow,mengxn/tensorflow,asimshankar/tensorflow,mavenlin/tensorflow,ZhangXinNan/tensorflow,tiagofrepereira2012/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,ppwwyyxx/tensorflow,jalexvig/tensorflow,jbedorf/tensorflow,Xeralux/tensorflow,strint/tensorflow,allenlavoie/tensorflow,jostep/tensorflow,laszlocsomor/tensorflow,DavidNorman/tensorflow,eadgarchen/tensorflow,kobejean/tensorflow,Intel-tensorflow/tensorflow,martinbede/second-sight,moonboots/tensorflow,manazhao/tf_recsys,tensorflow/tensorflow-pywrap_tf_optimizer,thjashin/tensorflow,annarev/tensorflow,paolodedios/tensorflow,anand-c-goog/tensorflow,AndreasMadsen/tensorflow,alivecor/tensorflow,petewarden/tensorflow_makefile,ravindrapanda/tensorflow,Moriadry/tensorflow,ishay2b/tensorflow,ishay2b/tensorflow,elingg/tensorflow,lukeiwanski/tensorflow,renyi533/tensorflow,nikste/tensorflow,lukeiwanski/tensorflow,ran5515/DeepDecision,scenarios/tensorflow,jendap/tensorflow,tillahoffmann/tensorflow,Mistobaan/tensorflow,theflofly/tensorflow,AndreasMadsen/tensorflow,XueqingLin/tensorflow,hlt-mt/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,nburn42/tensorflow,benoitsteiner/tensorflow,zasdfgbnm/tensorflow,meteorcloudy/tensorflow,annarev/tensorflow,laszlocsomor/tensorflow,manipopopo/tensorflow,brchiu/tensorflow,tongwang01/tensorflow,raymondxyang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,cxxgtxy/tensorflow,benoitsteiner/tensorflow,jart/tensorflow,SnakeJenny/TensorFlow,manazhao/tf_recsys,manjunaths/tensorflow,vrv/tensorflow,petewarden/tensorflow,lukeiwanski/tensorflow,gojira/tensorflow,kamcpp/tensorflow,ran5515/DeepDecision,alshedivat/tensorflow,nolanliou/tensorflow,gunan/tensorflow,gautam1858/tensorflow,hlt-mt/tensorflow,sjperkins/tensorflow,alivecor/tensorflow,ArtsiomCh/tensorflow,theflofly/tensorflow,jostep/tensorflow,yongtang/tensorflow,Xeralux/tensorflow,alheinecke/tensorflow-xsmm,TakayukiSakai/tensorflow,sandeepdsouza93/TensorFlow-15712,raymondxyang/tensorflow,scenarios/tensorflow,ninotoshi/tensorflow,alsrgv/tensorflow,snnn/tensorflow,jhseu/tensorflow,ppries/tensorflow,Bismarrck/tensorflow,markslwong/tensorflow,jhaux/tensorflow,annarev/tensorflow,eadgarchen/tensorflow,RyanYoung25/tensorflow,alsrgv/tensorflow,lakshayg/tensorflow,EvenStrangest/tensorflow,moonboots/tensorflow,ageron/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,laosiaudi/tensorflow,tillahoffmann/tensorflow,dhalleine/tensorflow,nburn42/tensorflow,ville-k/tensorflow,gautam1858/tensorflow,gojira/tensorflow,a-doumoulakis/tensorflow,asadziach/tensorflow,arborh/tensorflow,mrry/tensorflow,sandeepgupta2k4/tensorflow,arborh/tensorflow,paolodedios/tensorflow,4Quant/tensorflow,brchiu/tensorflow,llhe/tensorflow,tensorflow/tensorflow,dendisuhubdy/tensorflow,ychfan/tensorflow,johndpope/tensorflow,ageron/tensorflow,suiyuan2009/tensorflow,manipopopo/tensorflow,raymondxyang/tensorflow,chemelnucfin/tensorflow,AndreasMadsen/tensorflow,arborh/tensorflow,kevin-coder/tensorflow-fork,dyoung418/tensorflow,hsaputra/tensorflow,nburn42/tensorflow,davidzchen/tensorflow,code-sauce/tensorflow,TakayukiSakai/tensorflow,odejesush/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,raymondxyang/tensorflow,memo/tensorflow,unsiloai/syntaxnet-ops-hack,ninotoshi/tensorflow,Kongsea/tensorflow,laszlocsomor/tensorflow,HaebinShin/tensorflow,drpngx/tensorflow,zycdragonball/tensorflow,kchodorow/tensorflow,yufengg/tensorflow,chemelnucfin/tensorflow,ibab/tensorflow,jbedorf/tensorflow,vrv/tensorflow,andrewcmyers/tensorflow,petewarden/tensorflow_makefile,krikru/tensorflow-opencl,Xeralux/tensorflow,ville-k/tensorflow,xodus7/tensorflow,guschmue/tensorflow,adamtiger/tensorflow,manipopopo/tensorflow,tntnatbry/tensorflow,ageron/tensorflow,scenarios/tensorflow,hsaputra/tensorflow,gunan/tensorflow,nburn42/tensorflow,dancingdan/tensorflow,moonboots/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,RapidApplicationDevelopment/tensorflow,tntnatbry/tensorflow,a-doumoulakis/tensorflow,hfp/tensorflow-xsmm,xodus7/tensorflow,aselle/tensorflow,jeffzheng1/tensorflow,kevin-coder/tensorflow-fork,freedomtan/tensorflow,guschmue/tensorflow,ibab/tensorflow,annarev/tensorflow,seaotterman/tensorflow,manipopopo/tensorflow,alivecor/tensorflow,rabipanda/tensorflow,taknevski/tensorflow-xsmm,arborh/tensorflow,sarvex/tensorflow,cancan101/tensorflow,alistairlow/tensorflow,av8ramit/tensorflow,anand-c-goog/tensorflow,MoamerEncsConcordiaCa/tensorflow,DavidNorman/tensorflow,Mistobaan/tensorflow,AndreasMadsen/tensorflow,adit-chandra/tensorflow,shreyasva/tensorflow,sjperkins/tensorflow,Mazecreator/tensorflow,vrv/tensorflow,jwlawson/tensorflow,memo/tensorflow,wangyum/tensorflow,mdrumond/tensorflow,jostep/tensorflow,adit-chandra/tensorflow,martinbede/second-sight,Bismarrck/tensorflow,kamcpp/tensorflow,yaroslavvb/tensorflow,alheinecke/tensorflow-xsmm,bowang/tensorflow,4Quant/tensorflow,horance-liu/tensorflow,jart/tensorflow,johndpope/tensorflow,eaplatanios/tensorflow,Mazecreator/tensorflow,sandeepdsouza93/TensorFlow-15712,hfp/tensorflow-xsmm,jhaux/tensorflow,yongtang/tensorflow,lukas-krecan/tensorflow,benoitsteiner/tensorflow-xsmm,tomasreimers/tensorflow-emscripten,Carmezim/tensorflow,hfp/tensorflow-xsmm,with-git/tensorflow,tillahoffmann/tensorflow,ychfan/tensorflow,aam-at/tensorflow,chris-chris/tensorflow,MoamerEncsConcordiaCa/tensorflow,zycdragonball/tensorflow,drpngx/tensorflow,XueqingLin/tensorflow,ville-k/tensorflow,kamcpp/tensorflow,markslwong/tensorflow,Mistobaan/tensorflow,xzturn/tensorflow,strint/tensorflow,handroissuazo/tensorflow,adamtiger/tensorflow,horance-liu/tensorflow,dyoung418/tensorflow,gojira/tensorflow,zasdfgbnm/tensorflow,gojira/tensorflow
c
## Code Before: q /* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ #define TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ // IWYU pragma: private, include "third_party/tensorflow/core/platform/stream_executor_util.h" // IWYU pragma: friend third_party/tensorflow/core/platform/stream_executor_util.h #include "tensorflow/stream_executor/lib/status.h" namespace tensorflow { namespace gpu = ::perftools::gputools; // On the open-source platform, stream_executor currently uses // tensorflow::Status inline Status FromStreamExecutorStatus( const perftools::gputools::port::Status& s) { return s; } } // namespace tensorflow #endif // TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ ## Instruction: Fix build breakage from a typo. Change: 111528530 ## Code After: /* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ #define TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ // IWYU pragma: private, include "third_party/tensorflow/core/platform/stream_executor_util.h" // IWYU pragma: friend third_party/tensorflow/core/platform/stream_executor_util.h #include "tensorflow/stream_executor/lib/status.h" namespace tensorflow { namespace gpu = ::perftools::gputools; // On the open-source platform, stream_executor currently uses // tensorflow::Status inline Status FromStreamExecutorStatus( const perftools::gputools::port::Status& s) { return s; } } // namespace tensorflow #endif // TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_
# ... existing code ... /* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ #define TENSORFLOW_PLATFORM_DEFAULT_STREAM_EXECUTOR_UTIL_H_ # ... rest of the code ...
3ac381d6d06d1de10a5eb2dd2ff7b49c5f3062c4
src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatRipperTest.java
src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatRipperTest.java
package com.rarchives.ripme.tst.ripper.rippers; import com.rarchives.ripme.ripper.rippers.GfycatRipper; import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.URL; public class GfycatRipperTest extends RippersTest { /** * Rips correctly formatted URL directly from Gfycat * @throws IOException */ @Test public void testGfycatGoodURL() throws IOException{ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/TemptingExcellentIchthyosaurs")); testRipper(ripper); } /** * Rips badly formatted URL directly from Gfycat * @throws IOException */ public void testGfycatBadURL() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/gifs/detail/limitedtestyamericancrow")); testRipper(ripper); } public void testGfycatProfile() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/@golbanstorage")); testRipper(ripper); } }
package com.rarchives.ripme.tst.ripper.rippers; import com.rarchives.ripme.ripper.rippers.GfycatRipper; import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.URL; public class GfycatRipperTest extends RippersTest { /** * Rips correctly formatted URL directly from Gfycat * @throws IOException */ @Test public void testGfycatGoodURL() throws IOException{ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/TemptingExcellentIchthyosaurs")); testRipper(ripper); } /** * Rips badly formatted URL directly from Gfycat * @throws IOException */ public void testGfycatBadURL() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/gifs/detail/limitedtestyamericancrow")); testRipper(ripper); } /** * Rips a Gfycat profile * @throws IOException */ public void testGfycatProfile() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/@golbanstorage")); testRipper(ripper); } /** * Rips a Gfycat amp link * @throws IOException */ public void testGfycatAmp() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/amp/TemptingExcellentIchthyosaurs")); testRipper(ripper); } }
Add unit test for Gfycat Amp links
Add unit test for Gfycat Amp links
Java
mit
rephormat/ripme,sleaze/ripme,sleaze/ripme,rephormat/ripme,rephormat/ripme,sleaze/ripme
java
## Code Before: package com.rarchives.ripme.tst.ripper.rippers; import com.rarchives.ripme.ripper.rippers.GfycatRipper; import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.URL; public class GfycatRipperTest extends RippersTest { /** * Rips correctly formatted URL directly from Gfycat * @throws IOException */ @Test public void testGfycatGoodURL() throws IOException{ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/TemptingExcellentIchthyosaurs")); testRipper(ripper); } /** * Rips badly formatted URL directly from Gfycat * @throws IOException */ public void testGfycatBadURL() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/gifs/detail/limitedtestyamericancrow")); testRipper(ripper); } public void testGfycatProfile() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/@golbanstorage")); testRipper(ripper); } } ## Instruction: Add unit test for Gfycat Amp links ## Code After: package com.rarchives.ripme.tst.ripper.rippers; import com.rarchives.ripme.ripper.rippers.GfycatRipper; import org.junit.jupiter.api.Test; import java.io.IOException; import java.net.URL; public class GfycatRipperTest extends RippersTest { /** * Rips correctly formatted URL directly from Gfycat * @throws IOException */ @Test public void testGfycatGoodURL() throws IOException{ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/TemptingExcellentIchthyosaurs")); testRipper(ripper); } /** * Rips badly formatted URL directly from Gfycat * @throws IOException */ public void testGfycatBadURL() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/gifs/detail/limitedtestyamericancrow")); testRipper(ripper); } /** * Rips a Gfycat profile * @throws IOException */ public void testGfycatProfile() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/@golbanstorage")); testRipper(ripper); } /** * Rips a Gfycat amp link * @throws IOException */ public void testGfycatAmp() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/amp/TemptingExcellentIchthyosaurs")); testRipper(ripper); } }
// ... existing code ... testRipper(ripper); } /** * Rips a Gfycat profile * @throws IOException */ public void testGfycatProfile() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/@golbanstorage")); testRipper(ripper); } /** * Rips a Gfycat amp link * @throws IOException */ public void testGfycatAmp() throws IOException { GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/amp/TemptingExcellentIchthyosaurs")); testRipper(ripper); } } // ... rest of the code ...
f3f440380bd67ec450160f7bc34fa67f6bfa48d5
app/src/main/java/zero/zd/zquestionnaire/MainActivity.java
app/src/main/java/zero/zd/zquestionnaire/MainActivity.java
package zero.zd.zquestionnaire; import android.os.Bundle; import android.os.Environment; import android.support.v7.app.AppCompatActivity; import android.view.View; import java.io.File; public class MainActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // create a folder File folder = new File(Environment.getExternalStorageDirectory().getPath() + "/ZQuestionnaire/"); if (!folder.exists()) folder.mkdirs(); } public void onClickAnswer(View view) { startActivity(LoadQnaActivity.getStartIntent(MainActivity.this)); } public void onClickBuilder(View view) { startActivity(QnaBuilderActivity.getStartIntent(MainActivity.this)); } }
package zero.zd.zquestionnaire; import android.os.Bundle; import android.os.Environment; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import java.io.File; public class MainActivity extends AppCompatActivity { private static final String TAG = MainActivity.class.getSimpleName(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // create a folder File folder = new File(Environment.getExternalStorageDirectory().getPath() + "/ZQuestionnaire/"); if (!folder.exists()) { if (!folder.mkdirs()) { Log.e(TAG, "Failed on creating folders."); } } } public void onClickAnswer(View view) { startActivity(LoadQnaActivity.getStartIntent(MainActivity.this)); } public void onClickBuilder(View view) { startActivity(QnaBuilderActivity.getStartIntent(MainActivity.this)); } }
Add Logs on Filed Creation of Folders
:speaker: Add Logs on Filed Creation of Folders
Java
mit
zd-zero/ZQuestionnaire
java
## Code Before: package zero.zd.zquestionnaire; import android.os.Bundle; import android.os.Environment; import android.support.v7.app.AppCompatActivity; import android.view.View; import java.io.File; public class MainActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // create a folder File folder = new File(Environment.getExternalStorageDirectory().getPath() + "/ZQuestionnaire/"); if (!folder.exists()) folder.mkdirs(); } public void onClickAnswer(View view) { startActivity(LoadQnaActivity.getStartIntent(MainActivity.this)); } public void onClickBuilder(View view) { startActivity(QnaBuilderActivity.getStartIntent(MainActivity.this)); } } ## Instruction: :speaker: Add Logs on Filed Creation of Folders ## Code After: package zero.zd.zquestionnaire; import android.os.Bundle; import android.os.Environment; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import java.io.File; public class MainActivity extends AppCompatActivity { private static final String TAG = MainActivity.class.getSimpleName(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // create a folder File folder = new File(Environment.getExternalStorageDirectory().getPath() + "/ZQuestionnaire/"); if (!folder.exists()) { if (!folder.mkdirs()) { Log.e(TAG, "Failed on creating folders."); } } } public void onClickAnswer(View view) { startActivity(LoadQnaActivity.getStartIntent(MainActivity.this)); } public void onClickBuilder(View view) { startActivity(QnaBuilderActivity.getStartIntent(MainActivity.this)); } }
... import android.os.Bundle; import android.os.Environment; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import java.io.File; public class MainActivity extends AppCompatActivity { private static final String TAG = MainActivity.class.getSimpleName(); @Override protected void onCreate(Bundle savedInstanceState) { ... // create a folder File folder = new File(Environment.getExternalStorageDirectory().getPath() + "/ZQuestionnaire/"); if (!folder.exists()) { if (!folder.mkdirs()) { Log.e(TAG, "Failed on creating folders."); } } } public void onClickAnswer(View view) { ...
eb453010915f6700edd1baa0febcc634deec81dc
src/viewsapp/views.py
src/viewsapp/views.py
from decorator_plus import ( require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_safe_methods def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_form_methods def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
from decorator_plus import require_http_methods from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
Switch to using require_http_methods decorator.
Switch to using require_http_methods decorator.
Python
bsd-2-clause
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
python
## Code Before: from decorator_plus import ( require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_safe_methods def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_form_methods def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form}) ## Instruction: Switch to using require_http_methods decorator. ## Code After: from decorator_plus import require_http_methods from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
... from decorator_plus import require_http_methods from django.shortcuts import ( get_object_or_404, redirect, render) ... from .models import ExampleModel @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ... {'object': example_obj}) @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) ...
6c72b3e07b950bfc1a7d129cf80ef2ae06ce9fa2
test/test_all.c
test/test_all.c
CuSuite* StrUtilGetSuite(); CuSuite* make_regex_suite(); CuSuite* make_csv_suite(); void RunAllTests(void) { CuString *output = CuStringNew(); CuSuite* suite = CuSuiteNew(); CuSuiteAddSuite(suite, StrUtilGetSuite()); CuSuiteAddSuite(suite, make_regex_suite()); CuSuiteAddSuite(suite, make_csv_suite()); CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); } int main(void) { out_fd = stdout; /*For Logging*/ RunAllTests(); }
CuSuite* StrUtilGetSuite(); CuSuite* make_regex_suite(); CuSuite* make_csv_suite(); void RunAllTests(void) { CuString *output = CuStringNew(); CuSuite* suite = CuSuiteNew(); #if 1 CuSuiteAddSuite(suite, StrUtilGetSuite()); CuSuiteAddSuite(suite, make_regex_suite()); CuSuiteAddSuite(suite, make_csv_suite()); #endif CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); } int main(void) { out_fd = stdout; /*For Logging*/ RunAllTests(); }
Add possibility to disable tests.
Add possibility to disable tests.
C
apache-2.0
Moddus/mpi_search,Moddus/mpi_search,Moddus/mpi_search,Moddus/mpi_search
c
## Code Before: CuSuite* StrUtilGetSuite(); CuSuite* make_regex_suite(); CuSuite* make_csv_suite(); void RunAllTests(void) { CuString *output = CuStringNew(); CuSuite* suite = CuSuiteNew(); CuSuiteAddSuite(suite, StrUtilGetSuite()); CuSuiteAddSuite(suite, make_regex_suite()); CuSuiteAddSuite(suite, make_csv_suite()); CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); } int main(void) { out_fd = stdout; /*For Logging*/ RunAllTests(); } ## Instruction: Add possibility to disable tests. ## Code After: CuSuite* StrUtilGetSuite(); CuSuite* make_regex_suite(); CuSuite* make_csv_suite(); void RunAllTests(void) { CuString *output = CuStringNew(); CuSuite* suite = CuSuiteNew(); #if 1 CuSuiteAddSuite(suite, StrUtilGetSuite()); CuSuiteAddSuite(suite, make_regex_suite()); CuSuiteAddSuite(suite, make_csv_suite()); #endif CuSuiteRun(suite); CuSuiteSummary(suite, output); CuSuiteDetails(suite, output); printf("%s\n", output->buffer); } int main(void) { out_fd = stdout; /*For Logging*/ RunAllTests(); }
... CuString *output = CuStringNew(); CuSuite* suite = CuSuiteNew(); #if 1 CuSuiteAddSuite(suite, StrUtilGetSuite()); CuSuiteAddSuite(suite, make_regex_suite()); CuSuiteAddSuite(suite, make_csv_suite()); #endif CuSuiteRun(suite); CuSuiteSummary(suite, output); ...
85d0bc9fbb20daeff9aa48a83be1823fa346cb9c
tests/test_helpers.py
tests/test_helpers.py
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert items.next()['page'] == 1 assert items.next()['page'] == 2
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert next(items)['page'] == 1 assert next(items)['page'] == 2
Fix tests for Python 3
Fix tests for Python 3
Python
mit
alexandriagroup/rakuten-ws
python
## Code Before: from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert items.next()['page'] == 1 assert items.next()['page'] == 2 ## Instruction: Fix tests for Python 3 ## Code After: from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert next(items)['page'] == 1 assert next(items)['page'] == 2
... # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert next(items)['page'] == 1 assert next(items)['page'] == 2 ...
bcd7f8f3d7313538ab1c04da9c42e774350ccdfe
ui/widgets/histogram/TrackingHistogramWidget.py
ui/widgets/histogram/TrackingHistogramWidget.py
from PySide.QtGui import * from PySide.QtCore import * from HistogramWidget import HistogramWidget from TrackingNodeItem import TrackingNodeItem class TrackingHistogramWidget(HistogramWidget): """ TrackingHistogramWidget """ updatePosition = Signal(float) def __init__(self): super(TrackingHistogramWidget, self).__init__() self.nodeItem = None def update(self): super(TrackingHistogramWidget, self).update() if not self.nodeItem: return self.nodeItem.update() def setHistogram(self, histogram): super(TrackingHistogramWidget, self).setHistogram(histogram) if not self.nodeItem: self.nodeItem = TrackingNodeItem() self.scene().addItem(self.nodeItem) self.nodeItem.setHistogramItem(self._histogramItem) self.nodeItem.setPos(QPoint(0, 0)) self.nodeItem.setZValue(300) self.nodeItem.delegate = self def updatePos(self, position): self.updatePosition.emit(position)
from PySide.QtGui import * from PySide.QtCore import * from HistogramWidget import HistogramWidget from TrackingNodeItem import TrackingNodeItem from ui.widgets import Style class TrackingHistogramWidget(HistogramWidget): """ TrackingHistogramWidget """ updatePosition = Signal(float) def __init__(self): super(TrackingHistogramWidget, self).__init__() self.nodeItem = None Style.styleWidgetForTab(self) def update(self): super(TrackingHistogramWidget, self).update() if not self.nodeItem: return self.nodeItem.update() def setHistogram(self, histogram): super(TrackingHistogramWidget, self).setHistogram(histogram) if not self.nodeItem: self.nodeItem = TrackingNodeItem() self.scene().addItem(self.nodeItem) self.nodeItem.setHistogramItem(self._histogramItem) self.nodeItem.setPos(QPoint(0, 0)) self.nodeItem.setZValue(300) self.nodeItem.delegate = self def updatePos(self, position): self.updatePosition.emit(position)
Fix background color on OS X for histogram widget of ray.
Fix background color on OS X for histogram widget of ray.
Python
mit
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
python
## Code Before: from PySide.QtGui import * from PySide.QtCore import * from HistogramWidget import HistogramWidget from TrackingNodeItem import TrackingNodeItem class TrackingHistogramWidget(HistogramWidget): """ TrackingHistogramWidget """ updatePosition = Signal(float) def __init__(self): super(TrackingHistogramWidget, self).__init__() self.nodeItem = None def update(self): super(TrackingHistogramWidget, self).update() if not self.nodeItem: return self.nodeItem.update() def setHistogram(self, histogram): super(TrackingHistogramWidget, self).setHistogram(histogram) if not self.nodeItem: self.nodeItem = TrackingNodeItem() self.scene().addItem(self.nodeItem) self.nodeItem.setHistogramItem(self._histogramItem) self.nodeItem.setPos(QPoint(0, 0)) self.nodeItem.setZValue(300) self.nodeItem.delegate = self def updatePos(self, position): self.updatePosition.emit(position) ## Instruction: Fix background color on OS X for histogram widget of ray. ## Code After: from PySide.QtGui import * from PySide.QtCore import * from HistogramWidget import HistogramWidget from TrackingNodeItem import TrackingNodeItem from ui.widgets import Style class TrackingHistogramWidget(HistogramWidget): """ TrackingHistogramWidget """ updatePosition = Signal(float) def __init__(self): super(TrackingHistogramWidget, self).__init__() self.nodeItem = None Style.styleWidgetForTab(self) def update(self): super(TrackingHistogramWidget, self).update() if not self.nodeItem: return self.nodeItem.update() def setHistogram(self, histogram): super(TrackingHistogramWidget, self).setHistogram(histogram) if not self.nodeItem: self.nodeItem = TrackingNodeItem() self.scene().addItem(self.nodeItem) self.nodeItem.setHistogramItem(self._histogramItem) self.nodeItem.setPos(QPoint(0, 0)) self.nodeItem.setZValue(300) self.nodeItem.delegate = self def updatePos(self, position): self.updatePosition.emit(position)
// ... existing code ... from PySide.QtCore import * from HistogramWidget import HistogramWidget from TrackingNodeItem import TrackingNodeItem from ui.widgets import Style class TrackingHistogramWidget(HistogramWidget): // ... modified code ... def __init__(self): super(TrackingHistogramWidget, self).__init__() self.nodeItem = None Style.styleWidgetForTab(self) def update(self): super(TrackingHistogramWidget, self).update() // ... rest of the code ...
0699830cd0dd5b606647c28c7e4b0965c418e6b8
MenuItemKit/MenuItemKit.h
MenuItemKit/MenuItemKit.h
// // MenuItemKit.h // MenuItemKit // // Created by CHEN Xian’an on 1/16/16. // Copyright © 2016 lazyapps. All rights reserved. // #import <UIKit/UIKit.h> #import "Headers.h" //! Project version number for MenuItemKit. FOUNDATION_EXPORT double MenuItemKitVersionNumber; //! Project version string for MenuItemKit. FOUNDATION_EXPORT const unsigned char MenuItemKitVersionString[];
// // MenuItemKit.h // MenuItemKit // // Created by CHEN Xian’an on 1/16/16. // Copyright © 2016 lazyapps. All rights reserved. // #import <UIKit/UIKit.h> #import <MenuItemKit/Headers.h> //! Project version number for MenuItemKit. FOUNDATION_EXPORT double MenuItemKitVersionNumber; //! Project version string for MenuItemKit. FOUNDATION_EXPORT const unsigned char MenuItemKitVersionString[];
Use brackets for headers as Xcode suggested
Use brackets for headers as Xcode suggested
C
mit
cxa/MenuItemKit,cxa/MenuItemKit
c
## Code Before: // // MenuItemKit.h // MenuItemKit // // Created by CHEN Xian’an on 1/16/16. // Copyright © 2016 lazyapps. All rights reserved. // #import <UIKit/UIKit.h> #import "Headers.h" //! Project version number for MenuItemKit. FOUNDATION_EXPORT double MenuItemKitVersionNumber; //! Project version string for MenuItemKit. FOUNDATION_EXPORT const unsigned char MenuItemKitVersionString[]; ## Instruction: Use brackets for headers as Xcode suggested ## Code After: // // MenuItemKit.h // MenuItemKit // // Created by CHEN Xian’an on 1/16/16. // Copyright © 2016 lazyapps. All rights reserved. // #import <UIKit/UIKit.h> #import <MenuItemKit/Headers.h> //! Project version number for MenuItemKit. FOUNDATION_EXPORT double MenuItemKitVersionNumber; //! Project version string for MenuItemKit. FOUNDATION_EXPORT const unsigned char MenuItemKitVersionString[];
# ... existing code ... // #import <UIKit/UIKit.h> #import <MenuItemKit/Headers.h> //! Project version number for MenuItemKit. FOUNDATION_EXPORT double MenuItemKitVersionNumber; # ... rest of the code ...
3e278c99fdb82b839fafd8972402440e952c2cd4
zephyr/projects/volteer/include/i2c_map.h
zephyr/projects/volteer/include/i2c_map.h
/* Copyright 2020 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __ZEPHYR_CHROME_I2C_MAP_H #define __ZEPHYR_CHROME_I2C_MAP_H #include <devicetree.h> #include "config.h" /* We need registers.h to get the chip specific defines for now */ #include "registers.h" #define I2C_PORT_ACCEL I2C_PORT_SENSOR #define I2C_PORT_SENSOR NPCX_I2C_PORT0_0 #define I2C_PORT_USB_C0 NPCX_I2C_PORT1_0 #define I2C_PORT_USB_C1 NPCX_I2C_PORT2_0 #define I2C_PORT_USB_1_MIX NPCX_I2C_PORT3_0 #define I2C_PORT_POWER NPCX_I2C_PORT5_0 #define I2C_PORT_EEPROM NPCX_I2C_PORT7_0 #define I2C_ADDR_EEPROM_FLAGS 0x50 #endif /* __ZEPHYR_CHROME_I2C_MAP_H */
/* Copyright 2020 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __ZEPHYR_CHROME_I2C_MAP_H #define __ZEPHYR_CHROME_I2C_MAP_H #include <devicetree.h> #include "config.h" /* We need registers.h to get the chip specific defines for now */ #include "i2c/i2c.h" #define I2C_PORT_ACCEL I2C_PORT_SENSOR #define I2C_PORT_SENSOR NAMED_I2C(sensor) #define I2C_PORT_USB_C0 NAMED_I2C(usb_c0) #define I2C_PORT_USB_C1 NAMED_I2C(usb_c1) #define I2C_PORT_USB_1_MIX NAMED_I2C(usb1_mix) #define I2C_PORT_POWER NAMED_I2C(power) #define I2C_PORT_EEPROM NAMED_I2C(eeprom) #define I2C_ADDR_EEPROM_FLAGS 0x50 #endif /* __ZEPHYR_CHROME_I2C_MAP_H */
Remove dependency on npcx specific registers
volteer: Remove dependency on npcx specific registers This change removes the dependency on the npcx specific headers which are normally included via registers.h. It instead transitions to relying on i2c/i2c.h which defines various enums and the NAMED_I2C macro. BUG=b:175249000 TEST=zmake testall Cq-Depend: chromium:2582819 Change-Id: I7d8e98cc4228496b0c7603c0794eb92e0f79c01d Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/zephyr-chrome/+/2583272 Tested-by: Jack Rosenthal <[email protected]> Reviewed-by: Jack Rosenthal <[email protected]> Commit-Queue: Jack Rosenthal <[email protected]> Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/2630155 Reviewed-by: Simon Glass <[email protected]>
C
bsd-3-clause
coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec
c
## Code Before: /* Copyright 2020 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __ZEPHYR_CHROME_I2C_MAP_H #define __ZEPHYR_CHROME_I2C_MAP_H #include <devicetree.h> #include "config.h" /* We need registers.h to get the chip specific defines for now */ #include "registers.h" #define I2C_PORT_ACCEL I2C_PORT_SENSOR #define I2C_PORT_SENSOR NPCX_I2C_PORT0_0 #define I2C_PORT_USB_C0 NPCX_I2C_PORT1_0 #define I2C_PORT_USB_C1 NPCX_I2C_PORT2_0 #define I2C_PORT_USB_1_MIX NPCX_I2C_PORT3_0 #define I2C_PORT_POWER NPCX_I2C_PORT5_0 #define I2C_PORT_EEPROM NPCX_I2C_PORT7_0 #define I2C_ADDR_EEPROM_FLAGS 0x50 #endif /* __ZEPHYR_CHROME_I2C_MAP_H */ ## Instruction: volteer: Remove dependency on npcx specific registers This change removes the dependency on the npcx specific headers which are normally included via registers.h. It instead transitions to relying on i2c/i2c.h which defines various enums and the NAMED_I2C macro. BUG=b:175249000 TEST=zmake testall Cq-Depend: chromium:2582819 Change-Id: I7d8e98cc4228496b0c7603c0794eb92e0f79c01d Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/zephyr-chrome/+/2583272 Tested-by: Jack Rosenthal <[email protected]> Reviewed-by: Jack Rosenthal <[email protected]> Commit-Queue: Jack Rosenthal <[email protected]> Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/2630155 Reviewed-by: Simon Glass <[email protected]> ## Code After: /* Copyright 2020 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __ZEPHYR_CHROME_I2C_MAP_H #define __ZEPHYR_CHROME_I2C_MAP_H #include <devicetree.h> #include "config.h" /* We need registers.h to get the chip specific defines for now */ #include "i2c/i2c.h" #define I2C_PORT_ACCEL I2C_PORT_SENSOR #define I2C_PORT_SENSOR NAMED_I2C(sensor) #define I2C_PORT_USB_C0 NAMED_I2C(usb_c0) #define I2C_PORT_USB_C1 NAMED_I2C(usb_c1) #define I2C_PORT_USB_1_MIX NAMED_I2C(usb1_mix) #define I2C_PORT_POWER NAMED_I2C(power) #define I2C_PORT_EEPROM NAMED_I2C(eeprom) #define I2C_ADDR_EEPROM_FLAGS 0x50 #endif /* __ZEPHYR_CHROME_I2C_MAP_H */
... #include "config.h" /* We need registers.h to get the chip specific defines for now */ #include "i2c/i2c.h" #define I2C_PORT_ACCEL I2C_PORT_SENSOR #define I2C_PORT_SENSOR NAMED_I2C(sensor) #define I2C_PORT_USB_C0 NAMED_I2C(usb_c0) #define I2C_PORT_USB_C1 NAMED_I2C(usb_c1) #define I2C_PORT_USB_1_MIX NAMED_I2C(usb1_mix) #define I2C_PORT_POWER NAMED_I2C(power) #define I2C_PORT_EEPROM NAMED_I2C(eeprom) #define I2C_ADDR_EEPROM_FLAGS 0x50 ...
ff0b6270e68130d8fce9ee251c22df63b022a840
common/src/main/java/com/gentics/mesh/util/SearchWaitUtil.java
common/src/main/java/com/gentics/mesh/util/SearchWaitUtil.java
package com.gentics.mesh.util; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.event.MeshEventSender; import com.gentics.mesh.parameter.ParameterProviderContext; import io.reactivex.Completable; import javax.inject.Inject; import javax.inject.Singleton; @Singleton public class SearchWaitUtil { @Inject public MeshEventSender meshEventSender; @Inject public MeshOptions options; @Inject public SearchWaitUtil() { } public boolean delayRequested(ParameterProviderContext ppc) { return ppc.getSearchParameters().isWait() .orElseGet(options.getSearchOptions()::isWaitForIdle); } public Completable awaitSync(ParameterProviderContext ppc) { if (!delayRequested(ppc)) { return Completable.complete(); } return meshEventSender.isSearchIdle().flatMapCompletable(isIdle -> { if (isIdle) { return Completable.complete(); } meshEventSender.flushSearch(); return meshEventSender.waitForEvent(MeshEvent.SEARCH_IDLE); }).andThen(meshEventSender.refreshSearch()); } }
package com.gentics.mesh.util; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.etc.config.search.ElasticSearchOptions; import com.gentics.mesh.event.MeshEventSender; import com.gentics.mesh.parameter.ParameterProviderContext; import io.reactivex.Completable; import javax.inject.Inject; import javax.inject.Singleton; @Singleton public class SearchWaitUtil { @Inject public MeshEventSender meshEventSender; @Inject public MeshOptions options; @Inject public SearchWaitUtil() { } public boolean delayRequested(ParameterProviderContext ppc) { return ppc.getSearchParameters().isWait() .orElseGet(options.getSearchOptions()::isWaitForIdle); } public Completable awaitSync(ParameterProviderContext ppc) { if (!delayRequested(ppc)) { return Completable.complete(); } // We don't have to wait if no search is configured ElasticSearchOptions searchOptions = options.getSearchOptions(); if (searchOptions == null || searchOptions.getUrl() == null) { return Completable.complete(); } return meshEventSender.isSearchIdle().flatMapCompletable(isIdle -> { if (isIdle) { return Completable.complete(); } meshEventSender.flushSearch(); return meshEventSender.waitForEvent(MeshEvent.SEARCH_IDLE); }).andThen(meshEventSender.refreshSearch()); } }
Fix GraphQl if search is not configured
Fix GraphQl if search is not configured
Java
apache-2.0
gentics/mesh,gentics/mesh,gentics/mesh,gentics/mesh
java
## Code Before: package com.gentics.mesh.util; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.event.MeshEventSender; import com.gentics.mesh.parameter.ParameterProviderContext; import io.reactivex.Completable; import javax.inject.Inject; import javax.inject.Singleton; @Singleton public class SearchWaitUtil { @Inject public MeshEventSender meshEventSender; @Inject public MeshOptions options; @Inject public SearchWaitUtil() { } public boolean delayRequested(ParameterProviderContext ppc) { return ppc.getSearchParameters().isWait() .orElseGet(options.getSearchOptions()::isWaitForIdle); } public Completable awaitSync(ParameterProviderContext ppc) { if (!delayRequested(ppc)) { return Completable.complete(); } return meshEventSender.isSearchIdle().flatMapCompletable(isIdle -> { if (isIdle) { return Completable.complete(); } meshEventSender.flushSearch(); return meshEventSender.waitForEvent(MeshEvent.SEARCH_IDLE); }).andThen(meshEventSender.refreshSearch()); } } ## Instruction: Fix GraphQl if search is not configured ## Code After: package com.gentics.mesh.util; import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.etc.config.search.ElasticSearchOptions; import com.gentics.mesh.event.MeshEventSender; import com.gentics.mesh.parameter.ParameterProviderContext; import io.reactivex.Completable; import javax.inject.Inject; import javax.inject.Singleton; @Singleton public class SearchWaitUtil { @Inject public MeshEventSender meshEventSender; @Inject public MeshOptions options; @Inject public SearchWaitUtil() { } public boolean delayRequested(ParameterProviderContext ppc) { return ppc.getSearchParameters().isWait() .orElseGet(options.getSearchOptions()::isWaitForIdle); } public Completable awaitSync(ParameterProviderContext ppc) { if (!delayRequested(ppc)) { return Completable.complete(); } // We don't have to wait if no search is configured ElasticSearchOptions searchOptions = options.getSearchOptions(); if (searchOptions == null || searchOptions.getUrl() == null) { return Completable.complete(); } return meshEventSender.isSearchIdle().flatMapCompletable(isIdle -> { if (isIdle) { return Completable.complete(); } meshEventSender.flushSearch(); return meshEventSender.waitForEvent(MeshEvent.SEARCH_IDLE); }).andThen(meshEventSender.refreshSearch()); } }
... import com.gentics.mesh.core.rest.MeshEvent; import com.gentics.mesh.etc.config.MeshOptions; import com.gentics.mesh.etc.config.search.ElasticSearchOptions; import com.gentics.mesh.event.MeshEventSender; import com.gentics.mesh.parameter.ParameterProviderContext; import io.reactivex.Completable; ... return Completable.complete(); } // We don't have to wait if no search is configured ElasticSearchOptions searchOptions = options.getSearchOptions(); if (searchOptions == null || searchOptions.getUrl() == null) { return Completable.complete(); } return meshEventSender.isSearchIdle().flatMapCompletable(isIdle -> { if (isIdle) { return Completable.complete(); ...
64557b736e53df743e7807654c735979b5c5f939
paystack/src/main/java/co/paystack/android/api/request/ValidateRequestBody.java
paystack/src/main/java/co/paystack/android/api/request/ValidateRequestBody.java
package co.paystack.android.api.request; import com.google.gson.annotations.SerializedName; import java.io.Serializable; import java.util.HashMap; /** * Created by i on 24/08/2016. */ public class ValidateRequestBody extends BaseRequestBody implements Serializable { public static final String FIELD_TRANS = "trans"; public static final String FIELD_TOKEN = "token"; @SerializedName(FIELD_TRANS) public String trans; @SerializedName(FIELD_TOKEN) public String token; public ValidateRequestBody() { } public ValidateRequestBody(String trans, String token) { this.trans = trans; this.token = token; } @Override public HashMap<String, String> getParamsHashMap() { HashMap<String, String> params = new HashMap<>(); params.put(FIELD_TRANS, trans); params.put(FIELD_TOKEN, token); return params; } }
package co.paystack.android.api.request; import com.google.gson.annotations.SerializedName; import java.io.Serializable; import java.util.HashMap; /** * Created by i on 24/08/2016. */ public class ValidateRequestBody extends BaseRequestBody implements Serializable { public static final String FIELD_TRANS = "trans"; public static final String FIELD_TOKEN = "token"; @SerializedName(FIELD_TRANS) public String trans; @SerializedName(FIELD_TOKEN) public String token; public ValidateRequestBody() { } public String getTrans() { return trans; } public ValidateRequestBody setTrans(String trans) { this.trans = trans; return this; } public String getToken() { return token; } public ValidateRequestBody setToken(String token) { this.token = token; return this; } @Override public HashMap<String, String> getParamsHashMap() { HashMap<String, String> params = new HashMap<>(); params.put(FIELD_TRANS, getTrans()); params.put(FIELD_TOKEN, getToken()); return params; } }
Validate request body get and set fields
Validate request body get and set fields
Java
apache-2.0
PaystackHQ/paystack-android,PaystackHQ/paystack-android,PaystackHQ/paystack-android
java
## Code Before: package co.paystack.android.api.request; import com.google.gson.annotations.SerializedName; import java.io.Serializable; import java.util.HashMap; /** * Created by i on 24/08/2016. */ public class ValidateRequestBody extends BaseRequestBody implements Serializable { public static final String FIELD_TRANS = "trans"; public static final String FIELD_TOKEN = "token"; @SerializedName(FIELD_TRANS) public String trans; @SerializedName(FIELD_TOKEN) public String token; public ValidateRequestBody() { } public ValidateRequestBody(String trans, String token) { this.trans = trans; this.token = token; } @Override public HashMap<String, String> getParamsHashMap() { HashMap<String, String> params = new HashMap<>(); params.put(FIELD_TRANS, trans); params.put(FIELD_TOKEN, token); return params; } } ## Instruction: Validate request body get and set fields ## Code After: package co.paystack.android.api.request; import com.google.gson.annotations.SerializedName; import java.io.Serializable; import java.util.HashMap; /** * Created by i on 24/08/2016. */ public class ValidateRequestBody extends BaseRequestBody implements Serializable { public static final String FIELD_TRANS = "trans"; public static final String FIELD_TOKEN = "token"; @SerializedName(FIELD_TRANS) public String trans; @SerializedName(FIELD_TOKEN) public String token; public ValidateRequestBody() { } public String getTrans() { return trans; } public ValidateRequestBody setTrans(String trans) { this.trans = trans; return this; } public String getToken() { return token; } public ValidateRequestBody setToken(String token) { this.token = token; return this; } @Override public HashMap<String, String> getParamsHashMap() { HashMap<String, String> params = new HashMap<>(); params.put(FIELD_TRANS, getTrans()); params.put(FIELD_TOKEN, getToken()); return params; } }
// ... existing code ... public ValidateRequestBody() { } public String getTrans() { return trans; } public ValidateRequestBody setTrans(String trans) { this.trans = trans; return this; } public String getToken() { return token; } public ValidateRequestBody setToken(String token) { this.token = token; return this; } @Override public HashMap<String, String> getParamsHashMap() { HashMap<String, String> params = new HashMap<>(); params.put(FIELD_TRANS, getTrans()); params.put(FIELD_TOKEN, getToken()); return params; } } // ... rest of the code ...
ccd660c5deba37c0c324e64666eb6421696b3144
puffin/gui/form.py
puffin/gui/form.py
from flask_wtf import Form from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField from wtforms.validators import Required, Length, Regexp from ..core.db import db from ..core.security import User from .. import app class ApplicationForm(Form): start = SubmitField('Start') stop = SubmitField('Stop') class ApplicationSettingsForm(Form): domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider") submit = SubmitField('Update') def validate(self): rv = Form.validate(self) if not rv: return False if self.domain.data: server_name = app.config["SERVER_NAME_FULL"] if server_name != "localhost" and self.domain.data.endswith(server_name): self.domain.errors.append('Invalid domain, cannot end with ' + server_name) return False return True class ProfileForm(Form): login = StringField('Login') email = StringField('Email') name = StringField('Name', validators=[Required(), Length(1, 64), Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')]) submit = SubmitField('Update')
from flask_wtf import Form from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField from wtforms.validators import Required, Length, Regexp from ..core.db import db from ..core.security import User from .. import app class ApplicationForm(Form): start = SubmitField('Start') stop = SubmitField('Stop') class ApplicationSettingsForm(Form): domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider") submit = SubmitField('Update') def validate(self): rv = Form.validate(self) if not rv: return False if self.domain.data: server_name = app.config["SERVER_NAME_FULL"] if (server_name != "localhost" and not self.domain.data.endswith(current_user.login + "." + server_name) and self.domain.data.endswith(server_name)): self.domain.errors.append('Invalid domain, cannot end with ' + server_name) return False return True class ProfileForm(Form): login = StringField('Login') email = StringField('Email') name = StringField('Name', validators=[Required(), Length(1, 64), Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')]) submit = SubmitField('Update')
Allow changing to own domain name
Allow changing to own domain name
Python
agpl-3.0
loomchild/jenca-puffin,puffinrocks/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin
python
## Code Before: from flask_wtf import Form from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField from wtforms.validators import Required, Length, Regexp from ..core.db import db from ..core.security import User from .. import app class ApplicationForm(Form): start = SubmitField('Start') stop = SubmitField('Stop') class ApplicationSettingsForm(Form): domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider") submit = SubmitField('Update') def validate(self): rv = Form.validate(self) if not rv: return False if self.domain.data: server_name = app.config["SERVER_NAME_FULL"] if server_name != "localhost" and self.domain.data.endswith(server_name): self.domain.errors.append('Invalid domain, cannot end with ' + server_name) return False return True class ProfileForm(Form): login = StringField('Login') email = StringField('Email') name = StringField('Name', validators=[Required(), Length(1, 64), Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')]) submit = SubmitField('Update') ## Instruction: Allow changing to own domain name ## Code After: from flask_wtf import Form from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField from wtforms.validators import Required, Length, Regexp from ..core.db import db from ..core.security import User from .. import app class ApplicationForm(Form): start = SubmitField('Start') stop = SubmitField('Stop') class ApplicationSettingsForm(Form): domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider") submit = SubmitField('Update') def validate(self): rv = Form.validate(self) if not rv: return False if self.domain.data: server_name = app.config["SERVER_NAME_FULL"] if (server_name != "localhost" and not self.domain.data.endswith(current_user.login + "." + server_name) and self.domain.data.endswith(server_name)): self.domain.errors.append('Invalid domain, cannot end with ' + server_name) return False return True class ProfileForm(Form): login = StringField('Login') email = StringField('Email') name = StringField('Name', validators=[Required(), Length(1, 64), Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')]) submit = SubmitField('Update')
... if self.domain.data: server_name = app.config["SERVER_NAME_FULL"] if (server_name != "localhost" and not self.domain.data.endswith(current_user.login + "." + server_name) and self.domain.data.endswith(server_name)): self.domain.errors.append('Invalid domain, cannot end with ' + server_name) return False ...
4b418cee7bcf1f2d47674a94c5070f40771f54f5
BayesClassification.py
BayesClassification.py
import sys #------------------------------------------------------------------------------# # # # CLASSES # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # UTILITIES FUNCTIONS # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # "MAIN" FUNCTION # # # #------------------------------------------------------------------------------# # If this is the main module, run this if __name__ == '__main__': argsCount = len(sys.argv) argsIndex = 1
import sys #------------------------------------------------------------------------------# # # # CLASSES # # # #------------------------------------------------------------------------------# class DataFile: def __init__(self, fileLine, isGood): """ :rtype : object """ self.isGood = isGood self.fileLine = fileLine self.wordsCount = {} self.words = fileLine.split() for word in self.words: try: self.wordsCount[word] += 1 except KeyError: self.wordsCount[word] = 1 self.sumWords = sum(self.wordsCount.values()) def __repr__(self): print("input : "+self.fileLine) for key, val in self.wordsCount.items(): print(str(key)+" "+str(val)) print(str(self.sumWords)) return "" #------------------------------------------------------------------------------# # # # UTILITIES FUNCTIONS # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # "MAIN" FUNCTION # # # #------------------------------------------------------------------------------# # If this is the main module, run this if __name__ == '__main__': argsCount = len(sys.argv) argsIndex = 1 toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True) print(toto)
Add DataFile class to split words of a line and count it
Add DataFile class to split words of a line and count it
Python
apache-2.0
Chavjoh/BayesClassificationPython
python
## Code Before: import sys #------------------------------------------------------------------------------# # # # CLASSES # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # UTILITIES FUNCTIONS # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # "MAIN" FUNCTION # # # #------------------------------------------------------------------------------# # If this is the main module, run this if __name__ == '__main__': argsCount = len(sys.argv) argsIndex = 1 ## Instruction: Add DataFile class to split words of a line and count it ## Code After: import sys #------------------------------------------------------------------------------# # # # CLASSES # # # #------------------------------------------------------------------------------# class DataFile: def __init__(self, fileLine, isGood): """ :rtype : object """ self.isGood = isGood self.fileLine = fileLine self.wordsCount = {} self.words = fileLine.split() for word in self.words: try: self.wordsCount[word] += 1 except KeyError: self.wordsCount[word] = 1 self.sumWords = sum(self.wordsCount.values()) def __repr__(self): print("input : "+self.fileLine) for key, val in self.wordsCount.items(): print(str(key)+" "+str(val)) print(str(self.sumWords)) return "" #------------------------------------------------------------------------------# # # # UTILITIES FUNCTIONS # # # #------------------------------------------------------------------------------# #------------------------------------------------------------------------------# # # # "MAIN" FUNCTION # # # #------------------------------------------------------------------------------# # If this is the main module, run this if __name__ == '__main__': argsCount = len(sys.argv) argsIndex = 1 toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True) print(toto)
... # CLASSES # # # #------------------------------------------------------------------------------# class DataFile: def __init__(self, fileLine, isGood): """ :rtype : object """ self.isGood = isGood self.fileLine = fileLine self.wordsCount = {} self.words = fileLine.split() for word in self.words: try: self.wordsCount[word] += 1 except KeyError: self.wordsCount[word] = 1 self.sumWords = sum(self.wordsCount.values()) def __repr__(self): print("input : "+self.fileLine) for key, val in self.wordsCount.items(): print(str(key)+" "+str(val)) print(str(self.sumWords)) return "" #------------------------------------------------------------------------------# ... argsCount = len(sys.argv) argsIndex = 1 toto = DataFile("coucou je suis une grosse bite et je vous emmerde Monsieur le PD n'ha n'ha n'aire", True) print(toto) ...
2d3e52567d7d361428ce93d02cc42ecaddacab6c
tests/test_commands.py
tests/test_commands.py
from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None)
from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_none(mock_doc, mock_cwd): commands.init(None, None) def test_push_outside(): ''' $ couchapp push /path/to/app ''' pass @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_outside(mock_doc): ''' $ couchapp push --export /path/to/app ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, None, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir) @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_inside(mock_doc): ''' In the app dir:: $ couchapp push --export ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir)
Test cases for push with export flag
Test cases for push with export flag
Python
apache-2.0
couchapp/couchapp,h4ki/couchapp,couchapp/couchapp,couchapp/couchapp,h4ki/couchapp,h4ki/couchapp,couchapp/couchapp,h4ki/couchapp
python
## Code Before: from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) ## Instruction: Test cases for push with export flag ## Code After: from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_none(mock_doc, mock_cwd): commands.init(None, None) def test_push_outside(): ''' $ couchapp push /path/to/app ''' pass @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_outside(mock_doc): ''' $ couchapp push --export /path/to/app ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, None, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir) @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_inside(mock_doc): ''' In the app dir:: $ couchapp push --export ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir)
// ... existing code ... @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_none(mock_doc, mock_cwd): commands.init(None, None) def test_push_outside(): ''' $ couchapp push /path/to/app ''' pass @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_outside(mock_doc): ''' $ couchapp push --export /path/to/app ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, None, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir) @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_inside(mock_doc): ''' In the app dir:: $ couchapp push --export ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir) // ... rest of the code ...
30373694f22130e6de5e9359adb3076688d8f59b
jmespath-core/src/main/java/io/burt/jmespath/function/ToArrayFunction.java
jmespath-core/src/main/java/io/burt/jmespath/function/ToArrayFunction.java
package io.burt.jmespath.function; import java.util.List; import java.util.Arrays; import io.burt.jmespath.Adapter; import io.burt.jmespath.JmesPathType; public class ToArrayFunction extends BaseFunction { public ToArrayFunction() { super(ArgumentConstraints.anyValue()); } @Override protected <T> T callFunction(Adapter<T> runtime, List<FunctionArgument<T>> arguments) { T subject = arguments.get(0).value(); if (runtime.typeOf(subject) == JmesPathType.ARRAY) { return subject; } else { return runtime.createArray(Arrays.asList(subject)); } } }
package io.burt.jmespath.function; import java.util.Collections; import java.util.List; import java.util.Arrays; import io.burt.jmespath.Adapter; import io.burt.jmespath.JmesPathType; public class ToArrayFunction extends BaseFunction { public ToArrayFunction() { super(ArgumentConstraints.anyValue()); } @Override protected <T> T callFunction(Adapter<T> runtime, List<FunctionArgument<T>> arguments) { T subject = arguments.get(0).value(); if (runtime.typeOf(subject) == JmesPathType.ARRAY) { return subject; } else { return runtime.createArray(Collections.singletonList(subject)); } } }
Use the right kind of list creating method to show the correct intent
Use the right kind of list creating method to show the correct intent
Java
bsd-3-clause
burtcorp/jmespath-java
java
## Code Before: package io.burt.jmespath.function; import java.util.List; import java.util.Arrays; import io.burt.jmespath.Adapter; import io.burt.jmespath.JmesPathType; public class ToArrayFunction extends BaseFunction { public ToArrayFunction() { super(ArgumentConstraints.anyValue()); } @Override protected <T> T callFunction(Adapter<T> runtime, List<FunctionArgument<T>> arguments) { T subject = arguments.get(0).value(); if (runtime.typeOf(subject) == JmesPathType.ARRAY) { return subject; } else { return runtime.createArray(Arrays.asList(subject)); } } } ## Instruction: Use the right kind of list creating method to show the correct intent ## Code After: package io.burt.jmespath.function; import java.util.Collections; import java.util.List; import java.util.Arrays; import io.burt.jmespath.Adapter; import io.burt.jmespath.JmesPathType; public class ToArrayFunction extends BaseFunction { public ToArrayFunction() { super(ArgumentConstraints.anyValue()); } @Override protected <T> T callFunction(Adapter<T> runtime, List<FunctionArgument<T>> arguments) { T subject = arguments.get(0).value(); if (runtime.typeOf(subject) == JmesPathType.ARRAY) { return subject; } else { return runtime.createArray(Collections.singletonList(subject)); } } }
... package io.burt.jmespath.function; import java.util.Collections; import java.util.List; import java.util.Arrays; ... if (runtime.typeOf(subject) == JmesPathType.ARRAY) { return subject; } else { return runtime.createArray(Collections.singletonList(subject)); } } } ...
ed09a3ded286cc4d5623c17e65b2d40ef55ccee7
valohai_yaml/parsing.py
valohai_yaml/parsing.py
from typing import IO, Union from valohai_yaml.objs import Config from .utils import read_yaml def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config: """ Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :param validate: Whether to validate the data before attempting to parse it. :return: Config object """ data = read_yaml(yaml) if validate: # pragma: no branch from .validation import validate as do_validate do_validate(data, raise_exc=True) return Config.parse(data)
from typing import IO, Union from valohai_yaml.objs import Config from .utils import read_yaml def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config: """ Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :param validate: Whether to validate the data before attempting to parse it. :return: Config object """ data = read_yaml(yaml) if data is None: # empty file return Config() if validate: # pragma: no branch from .validation import validate as do_validate do_validate(data, raise_exc=True) return Config.parse(data)
Handle empty YAML files in parse()
Handle empty YAML files in parse() Refs valohai/valohai-cli#170
Python
mit
valohai/valohai-yaml
python
## Code Before: from typing import IO, Union from valohai_yaml.objs import Config from .utils import read_yaml def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config: """ Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :param validate: Whether to validate the data before attempting to parse it. :return: Config object """ data = read_yaml(yaml) if validate: # pragma: no branch from .validation import validate as do_validate do_validate(data, raise_exc=True) return Config.parse(data) ## Instruction: Handle empty YAML files in parse() Refs valohai/valohai-cli#170 ## Code After: from typing import IO, Union from valohai_yaml.objs import Config from .utils import read_yaml def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config: """ Parse the given YAML data into a `Config` object, optionally validating it first. :param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list) :param validate: Whether to validate the data before attempting to parse it. :return: Config object """ data = read_yaml(yaml) if data is None: # empty file return Config() if validate: # pragma: no branch from .validation import validate as do_validate do_validate(data, raise_exc=True) return Config.parse(data)
// ... existing code ... :return: Config object """ data = read_yaml(yaml) if data is None: # empty file return Config() if validate: # pragma: no branch from .validation import validate as do_validate do_validate(data, raise_exc=True) // ... rest of the code ...
9d5c6d1a379790203d75859fed6f34e9bb94dd7a
src/main/java/com/minelittlepony/pony/data/PonyWearable.java
src/main/java/com/minelittlepony/pony/data/PonyWearable.java
package com.minelittlepony.pony.data; import java.util.ArrayList; import java.util.List; public enum PonyWearable implements ITriggerPixelMapped<PonyWearable> { NONE(0), SADDLE_BAGS(0xff0000), HAT(0x00ff00); private int triggerValue; PonyWearable(int pixel) { triggerValue = pixel; } @Override public int getTriggerPixel() { return triggerValue; } public static PonyWearable[] flags(boolean[] flags) { List<PonyWearable> wears = new ArrayList<PonyWearable>(); PonyWearable[] values = values(); for (int i = 0; i < values.length; i++) { if (flags[i]) wears.add(values[i]); } return wears.toArray(new PonyWearable[wears.size()]); } }
package com.minelittlepony.pony.data; import java.util.ArrayList; import java.util.List; public enum PonyWearable implements ITriggerPixelMapped<PonyWearable> { NONE(0), SADDLE_BAGS(255), HAT(100); private int triggerValue; PonyWearable(int pixel) { triggerValue = pixel; } @Override public int getTriggerPixel() { return triggerValue; } public static PonyWearable[] flags(boolean[] flags) { List<PonyWearable> wears = new ArrayList<PonyWearable>(); PonyWearable[] values = values(); for (int i = 0; i < values.length; i++) { if (flags[i]) wears.add(values[i]); } return wears.toArray(new PonyWearable[wears.size()]); } }
Fix trigger pixel for saddlebags
Fix trigger pixel for saddlebags
Java
mit
MineLittlePony/MineLittlePony,MineLittlePony/MineLittlePony
java
## Code Before: package com.minelittlepony.pony.data; import java.util.ArrayList; import java.util.List; public enum PonyWearable implements ITriggerPixelMapped<PonyWearable> { NONE(0), SADDLE_BAGS(0xff0000), HAT(0x00ff00); private int triggerValue; PonyWearable(int pixel) { triggerValue = pixel; } @Override public int getTriggerPixel() { return triggerValue; } public static PonyWearable[] flags(boolean[] flags) { List<PonyWearable> wears = new ArrayList<PonyWearable>(); PonyWearable[] values = values(); for (int i = 0; i < values.length; i++) { if (flags[i]) wears.add(values[i]); } return wears.toArray(new PonyWearable[wears.size()]); } } ## Instruction: Fix trigger pixel for saddlebags ## Code After: package com.minelittlepony.pony.data; import java.util.ArrayList; import java.util.List; public enum PonyWearable implements ITriggerPixelMapped<PonyWearable> { NONE(0), SADDLE_BAGS(255), HAT(100); private int triggerValue; PonyWearable(int pixel) { triggerValue = pixel; } @Override public int getTriggerPixel() { return triggerValue; } public static PonyWearable[] flags(boolean[] flags) { List<PonyWearable> wears = new ArrayList<PonyWearable>(); PonyWearable[] values = values(); for (int i = 0; i < values.length; i++) { if (flags[i]) wears.add(values[i]); } return wears.toArray(new PonyWearable[wears.size()]); } }
// ... existing code ... public enum PonyWearable implements ITriggerPixelMapped<PonyWearable> { NONE(0), SADDLE_BAGS(255), HAT(100); private int triggerValue; // ... rest of the code ...
f0d629ae8b4568b2aceaf38779c8b07832e860b0
teamspeak_web_utils.py
teamspeak_web_utils.py
import re from bs4 import BeautifulSoup import cfscrape def nplstatus(): scraper = cfscrape.create_scraper() data = scraper.get('http://npl.teamspeakusa.com/ts3npl.php').content soup = BeautifulSoup(data, 'html.parser') raw_status = soup.find_all(class_='register_linklabel')[2].span return not raw_status def latest_version(): scraper = cfscrape.create_scraper() data = scraper.get('http://teamspeak.com/downloads').content soup = BeautifulSoup(data, 'html.parser') def search(search_string): return soup.find_all(text=re.compile(search_string))[0].parent.\ find(class_='version').text return search(r'Client\ 64\-bit'), search(r'Server\ 64\-bit')
import re from bs4 import BeautifulSoup import cfscrape def nplstatus(): scraper = cfscrape.create_scraper() data = scraper.get('http://npl.teamspeakusa.com/ts3npl.php').content soup = BeautifulSoup(data, 'html.parser') raw_status = soup.find_all(class_='register_linklabel')[2].span return not raw_status def latest_version(): scraper = cfscrape.create_scraper() data = scraper.get('http://teamspeak.com/downloads').content soup = BeautifulSoup(data, 'html.parser') def search(search_string): return soup.find_all(text=re.compile(search_string))[0].parent.\ find(class_='version').text def clean(s): return s.replace('\n', '').strip() return clean(search(r'Client\ 64\-bit')), \ clean(search(r'Server\ 64\-bit'))
Clean string returned by website
Clean string returned by website => remove newline characters and strip
Python
mit
Thor77/TeamspeakIRC
python
## Code Before: import re from bs4 import BeautifulSoup import cfscrape def nplstatus(): scraper = cfscrape.create_scraper() data = scraper.get('http://npl.teamspeakusa.com/ts3npl.php').content soup = BeautifulSoup(data, 'html.parser') raw_status = soup.find_all(class_='register_linklabel')[2].span return not raw_status def latest_version(): scraper = cfscrape.create_scraper() data = scraper.get('http://teamspeak.com/downloads').content soup = BeautifulSoup(data, 'html.parser') def search(search_string): return soup.find_all(text=re.compile(search_string))[0].parent.\ find(class_='version').text return search(r'Client\ 64\-bit'), search(r'Server\ 64\-bit') ## Instruction: Clean string returned by website => remove newline-characters and strip ## Code After: import re from bs4 import BeautifulSoup import cfscrape def nplstatus(): scraper = cfscrape.create_scraper() data = scraper.get('http://npl.teamspeakusa.com/ts3npl.php').content soup = BeautifulSoup(data, 'html.parser') raw_status = soup.find_all(class_='register_linklabel')[2].span return not raw_status def latest_version(): scraper = cfscrape.create_scraper() data = scraper.get('http://teamspeak.com/downloads').content soup = BeautifulSoup(data, 'html.parser') def search(search_string): return soup.find_all(text=re.compile(search_string))[0].parent.\ find(class_='version').text def clean(s): return s.replace('\n', '').strip() return clean(search(r'Client\ 64\-bit')), \ clean(search(r'Server\ 64\-bit'))
# ... existing code ... def search(search_string): return soup.find_all(text=re.compile(search_string))[0].parent.\ find(class_='version').text def clean(s): return s.replace('\n', '').strip() return clean(search(r'Client\ 64\-bit')), \ clean(search(r'Server\ 64\-bit')) # ... rest of the code ...
799ebb2390495641e390acd1a077a2b7501e28e2
core/src/main/java/org/realityforge/arez/Disposable.java
core/src/main/java/org/realityforge/arez/Disposable.java
package org.realityforge.arez; /** * Interface that allows caller to release any resources associated with element. * It is safe to invoke {@link #dispose()} multiple times on a element. Dispose * is considered a state modifying action and must be called either within an * action where mutation is true or else it will start it's own transaction before * performing dispose. Once an element is disposed, no other methods should be invoked * on element. */ @FunctionalInterface public interface Disposable { /** * Dispose element. */ void dispose(); }
package org.realityforge.arez; /** * Interface that allows caller to release any resources associated with element. * It is safe to invoke {@link #dispose()} multiple times on a element. Dispose * is considered a state modifying action and must be called either within an * action where mutation is true or else it will start it's own transaction before * performing dispose. Once an element is disposed, no other methods should be invoked * on element. */ @FunctionalInterface public interface Disposable { /** * Dispose element. See {@link Disposable} for a description of the implications. */ void dispose(); }
Update the description so docs can be inherited by implementers
Update the description so docs can be inherited by implementers
Java
apache-2.0
realityforge/arez,realityforge/arez,realityforge/arez
java
## Code Before: package org.realityforge.arez; /** * Interface that allows caller to release any resources associated with element. * It is safe to invoke {@link #dispose()} multiple times on a element. Dispose * is considered a state modifying action and must be called either within an * action where mutation is true or else it will start it's own transaction before * performing dispose. Once an element is disposed, no other methods should be invoked * on element. */ @FunctionalInterface public interface Disposable { /** * Dispose element. */ void dispose(); } ## Instruction: Update the description so docs can be inherited by implementers ## Code After: package org.realityforge.arez; /** * Interface that allows caller to release any resources associated with element. * It is safe to invoke {@link #dispose()} multiple times on a element. Dispose * is considered a state modifying action and must be called either within an * action where mutation is true or else it will start it's own transaction before * performing dispose. Once an element is disposed, no other methods should be invoked * on element. */ @FunctionalInterface public interface Disposable { /** * Dispose element. See {@link Disposable} for a description of the implications. */ void dispose(); }
# ... existing code ... public interface Disposable { /** * Dispose element. See {@link Disposable} for a description of the implications. */ void dispose(); } # ... rest of the code ...
4de03c57bf4f4995eb8c8859e0a40b7c5fc9942b
desktop/libs/libzookeeper/src/libzookeeper/models.py
desktop/libs/libzookeeper/src/libzookeeper/models.py
from kazoo.client import KazooClient from libzookeeper.conf import PRINCIPAL_NAME def get_children_data(ensemble, namespace, read_only=True): zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get()) zk.start() children_data = [] children = zk.get_children(namespace) for node in children: data, stat = zk.get("%s/%s" % (namespace, node)) children_data.append(data) zk.stop() return children_data
from kazoo.client import KazooClient from hadoop import cluster from desktop.lib.exceptions_renderable import PopupException from libzookeeper.conf import PRINCIPAL_NAME def get_children_data(ensemble, namespace, read_only=True): hdfs = cluster.get_hdfs() if hdfs is None: raise PopupException(_('No [hdfs] configured in hue.ini.')) if hdfs.security_enabled: sasl_server_principal = PRINCIPAL_NAME.get() else: sasl_server_principal = None zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal) zk.start() children_data = [] children = zk.get_children(namespace) for node in children: data, stat = zk.get("%s/%s" % (namespace, node)) children_data.append(data) zk.stop() return children_data
Enable Kerberos automatically based on HDFS security
[libzookeeper] Enable Kerberos automatically based on HDFS security We don't need another property that way, and Kerberos is an all-or-nothing setup. Even if HDFS is not used in Hue, the default hue.ini has security set to false.
Python
apache-2.0
pratikmallya/hue,jjmleiro/hue,lumig242/Hue-Integration-with-CDAP,cloudera/hue,pratikmallya/hue,xiangel/hue,Peddle/hue,x303597316/hue,cloudera/hue,rahul67/hue,kawamon/hue,yongshengwang/hue,MobinRanjbar/hue,x303597316/hue,xq262144/hue,jayceyxc/hue,mapr/hue,yongshengwang/hue,pratikmallya/hue,sanjeevtripurari/hue,lumig242/Hue-Integration-with-CDAP,jayceyxc/hue,cloudera/hue,jounex/hue,Peddle/hue,ChenJunor/hue,jayceyxc/hue,kawamon/hue,Peddle/hue,vmax-feihu/hue,MobinRanjbar/hue,vmax-feihu/hue,ahmed-mahran/hue,ahmed-mahran/hue,kawamon/hue,kawamon/hue,GitHublong/hue,hdinsight/hue,Peddle/hue,kawamon/hue,xiangel/hue,yoer/hue,pratikmallya/hue,yongshengwang/hue,kawamon/hue,cloudera/hue,jounex/hue,cloudera/hue,rahul67/hue,javachengwc/hue,azureplus/hue,kawamon/hue,vmax-feihu/hue,fangxingli/hue,hdinsight/hue,jounex/hue,cloudera/hue,sanjeevtripurari/hue,rahul67/hue,lumig242/Hue-Integration-with-CDAP,jjmleiro/hue,kawamon/hue,cloudera/hue,javachengwc/hue,cloudera/hue,yongshengwang/hue,jjmleiro/hue,mapr/hue,yoer/hue,yongshengwang/hue,todaychi/hue,kawamon/hue,GitHublong/hue,xq262144/hue,jounex/hue,kawamon/hue,ChenJunor/hue,cloudera/hue,x303597316/hue,jayceyxc/hue,todaychi/hue,jounex/hue,GitHublong/hue,hdinsight/hue,yoer/hue,hdinsight/hue,fangxingli/hue,kawamon/hue,lumig242/Hue-Integration-with-CDAP,x303597316/hue,ahmed-mahran/hue,azureplus/hue,xiangel/hue,todaychi/hue,jjmleiro/hue,jounex/hue,cloudera/hue,fangxingli/hue,vmax-feihu/hue,yongshengwang/hue,hdinsight/hue,kawamon/hue,ChenJunor/hue,todaychi/hue,lumig242/Hue-Integration-with-CDAP,rahul67/hue,javachengwc/hue,todaychi/hue,todaychi/hue,rahul67/hue,kawamon/hue,GitHublong/hue,yoer/hue,fangxingli/hue,javachengwc/hue,todaychi/hue,cloudera/hue,cloudera/hue,ChenJunor/hue,jayceyxc/hue,jayceyxc/hue,Peddle/hue,sanjeevtripurari/hue,lumig242/Hue-Integration-with-CDAP,xq262144/hue,pratikmallya/hue,GitHublong/hue,x303597316/hue,jayceyxc/hue,ahmed-mahran/hue,MobinRanjbar/hue,MobinRanjbar/hue,vmax-feihu/hue,fangxingli/hue,mapr/hue,xiangel/hue,kawamon/hue,lumig242/Hue-Integration-with-CDAP,mapr/hue,mapr/hue,fangxingli/hue,ChenJunor/hue,jayceyxc/hue,jjmleiro/hue,GitHublong/hue,todaychi/hue,pratikmallya/hue,cloudera/hue,ahmed-mahran/hue,cloudera/hue,xq262144/hue,rahul67/hue,jayceyxc/hue,azureplus/hue,jjmleiro/hue,MobinRanjbar/hue,javachengwc/hue,azureplus/hue,mapr/hue,jounex/hue,xiangel/hue,vmax-feihu/hue,hdinsight/hue,sanjeevtripurari/hue,ahmed-mahran/hue,yoer/hue,ahmed-mahran/hue,pratikmallya/hue,hdinsight/hue,cloudera/hue,ChenJunor/hue,xiangel/hue,kawamon/hue,xq262144/hue,yongshengwang/hue,jounex/hue,hdinsight/hue,MobinRanjbar/hue,rahul67/hue,Peddle/hue,kawamon/hue,xq262144/hue,azureplus/hue,lumig242/Hue-Integration-with-CDAP,vmax-feihu/hue,Peddle/hue,xiangel/hue,sanjeevtripurari/hue,sanjeevtripurari/hue,x303597316/hue,MobinRanjbar/hue,javachengwc/hue,xq262144/hue,fangxingli/hue,x303597316/hue,mapr/hue,xq262144/hue,pratikmallya/hue,sanjeevtripurari/hue,javachengwc/hue,sanjeevtripurari/hue,yoer/hue,rahul67/hue,kawamon/hue,azureplus/hue,jjmleiro/hue,Peddle/hue,ChenJunor/hue,MobinRanjbar/hue,cloudera/hue,yongshengwang/hue,azureplus/hue,yoer/hue,ChenJunor/hue,cloudera/hue,vmax-feihu/hue,cloudera/hue,azureplus/hue,yoer/hue,jjmleiro/hue,javachengwc/hue,GitHublong/hue,Peddle/hue,ahmed-mahran/hue,todaychi/hue,x303597316/hue,jjmleiro/hue,xiangel/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,fangxingli/hue,GitHublong/hue,xq262144/hue
python
## Code Before: from kazoo.client import KazooClient from libzookeeper.conf import PRINCIPAL_NAME def get_children_data(ensemble, namespace, read_only=True): zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get()) zk.start() children_data = [] children = zk.get_children(namespace) for node in children: data, stat = zk.get("%s/%s" % (namespace, node)) children_data.append(data) zk.stop() return children_data ## Instruction: [libzookeeper] Enable Kerberos automatically based on HDFS security We don't need another property that way and Kerberos is a all or nothing setup. Even if HDFS is not used in Hue, the default hue.ini has security set to false. ## Code After: from kazoo.client import KazooClient from hadoop import cluster from desktop.lib.exceptions_renderable import PopupException from libzookeeper.conf import PRINCIPAL_NAME def get_children_data(ensemble, namespace, read_only=True): hdfs = cluster.get_hdfs() if hdfs is None: raise PopupException(_('No [hdfs] configured in hue.ini.')) if hdfs.security_enabled: sasl_server_principal = PRINCIPAL_NAME.get() else: sasl_server_principal = None zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal) zk.start() children_data = [] children = zk.get_children(namespace) for node in children: data, stat = zk.get("%s/%s" % (namespace, node)) children_data.append(data) zk.stop() return children_data
... from kazoo.client import KazooClient from hadoop import cluster from desktop.lib.exceptions_renderable import PopupException from libzookeeper.conf import PRINCIPAL_NAME def get_children_data(ensemble, namespace, read_only=True): hdfs = cluster.get_hdfs() if hdfs is None: raise PopupException(_('No [hdfs] configured in hue.ini.')) if hdfs.security_enabled: sasl_server_principal = PRINCIPAL_NAME.get() else: sasl_server_principal = None zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal) zk.start() ... children_data.append(data) zk.stop() return children_data ...
e4ad2863236cd36e5860f1d17a06ca05e30216d5
make_database.py
make_database.py
import sqlite3 CREATE_SONG_QUEUE = ''' CREATE TABLE IF NOT EXISTS jukebox_song_queue ( spotify_uri TEXT, has_played INTEGER DEFAULT 0 ); ''' if __name__ == '__main__': conn = sqlite3.connect('jukebox.db') cursor = conn.cursor() cursor.execute(CREATE_SONG_QUEUE) conn.commit() conn.close()
import sqlite3 CREATE_SONG_QUEUE = ''' CREATE TABLE IF NOT EXISTS jukebox_song_queue ( spotify_uri TEXT, has_played INTEGER DEFAULT 0, name TEXT, artist_name TEXT, artist_uri TEXT, artist_image TEXT, album_name TEXT, album_uri TEXT, album_image TEXT ); ''' if __name__ == '__main__': conn = sqlite3.connect('jukebox.db') cursor = conn.cursor() cursor.execute(CREATE_SONG_QUEUE) conn.commit() conn.close()
Store more stuff about songs in the queue
Store more stuff about songs in the queue
Python
mit
projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox
python
## Code Before: import sqlite3 CREATE_SONG_QUEUE = ''' CREATE TABLE IF NOT EXISTS jukebox_song_queue ( spotify_uri TEXT, has_played INTEGER DEFAULT 0 ); ''' if __name__ == '__main__': conn = sqlite3.connect('jukebox.db') cursor = conn.cursor() cursor.execute(CREATE_SONG_QUEUE) conn.commit() conn.close() ## Instruction: Store more stuff about songs in the queue ## Code After: import sqlite3 CREATE_SONG_QUEUE = ''' CREATE TABLE IF NOT EXISTS jukebox_song_queue ( spotify_uri TEXT, has_played INTEGER DEFAULT 0, name TEXT, artist_name TEXT, artist_uri TEXT, artist_image TEXT, album_name TEXT, album_uri TEXT, album_image TEXT ); ''' if __name__ == '__main__': conn = sqlite3.connect('jukebox.db') cursor = conn.cursor() cursor.execute(CREATE_SONG_QUEUE) conn.commit() conn.close()
# ... existing code ... CREATE TABLE IF NOT EXISTS jukebox_song_queue ( spotify_uri TEXT, has_played INTEGER DEFAULT 0, name TEXT, artist_name TEXT, artist_uri TEXT, artist_image TEXT, album_name TEXT, album_uri TEXT, album_image TEXT ); ''' # ... rest of the code ...
ceee44182b24ecdc0563a9e9a6841993d1978d0c
setup.py
setup.py
from distutils.core import setup setup( name='aJohnShots', version="1.0.0", description='Python module/library for saving Security Hash Algorithms into JSON format.', author='funilrys', author_email='[email protected]', license='GPL-3.0 https://opensource.org/licenses/GPL-3.0', url='https://github.com/funilrys/A-John-Shots', platforms=['any'], packages=['a_john_shots'], keywords=['Python', 'JSON', 'SHA 1', 'SHA-512', 'SHA-224', 'SHA-384', 'SHA'], classifiers=[ 'Environment :: Console', 'Topic :: Software Development', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)' ], ) ''' test_suite='testsuite', entry_points=""" [console_scripts] cmd = package:main """, '''
from distutils.core import setup setup( name='a_john_shots', version="1.0.0", description='Python module/library for saving Security Hash Algorithms into JSON format.', long_description=open('README').read(), author='funilrys', author_email='[email protected]', license='GPL-3.0 https://opensource.org/licenses/GPL-3.0', url='https://github.com/funilrys/A-John-Shots', platforms=['any'], packages=['a_john_shots'], keywords=['Python', 'JSON', 'SHA-1', 'SHA-512', 'SHA-224', 'SHA-384', 'SHA', 'MD5'], classifiers=[ 'Environment :: Console', 'Topic :: Software Development', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)' ], ) ''' test_suite='testsuite', entry_points=""" [console_scripts] cmd = package:main """, '''
Rename + add long_description + update keywords
Rename + add long_description + update keywords
Python
mit
funilrys/A-John-Shots
python
## Code Before: from distutils.core import setup setup( name='aJohnShots', version="1.0.0", description='Python module/library for saving Security Hash Algorithms into JSON format.', author='funilrys', author_email='[email protected]', license='GPL-3.0 https://opensource.org/licenses/GPL-3.0', url='https://github.com/funilrys/A-John-Shots', platforms=['any'], packages=['a_john_shots'], keywords=['Python', 'JSON', 'SHA 1', 'SHA-512', 'SHA-224', 'SHA-384', 'SHA'], classifiers=[ 'Environment :: Console', 'Topic :: Software Development', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)' ], ) ''' test_suite='testsuite', entry_points=""" [console_scripts] cmd = package:main """, ''' ## Instruction: Rename + add long_description + update keywords ## Code After: from distutils.core import setup setup( name='a_john_shots', version="1.0.0", description='Python module/library for saving Security Hash Algorithms into JSON format.', long_description=open('README').read(), author='funilrys', author_email='[email protected]', license='GPL-3.0 https://opensource.org/licenses/GPL-3.0', url='https://github.com/funilrys/A-John-Shots', platforms=['any'], packages=['a_john_shots'], keywords=['Python', 'JSON', 'SHA-1', 'SHA-512', 'SHA-224', 'SHA-384', 'SHA', 'MD5'], classifiers=[ 'Environment :: Console', 'Topic :: Software Development', 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)' ], ) ''' test_suite='testsuite', entry_points=""" [console_scripts] cmd = package:main """, '''
// ... existing code ... from distutils.core import setup setup( name='a_john_shots', version="1.0.0", description='Python module/library for saving Security Hash Algorithms into JSON format.', long_description=open('README').read(), author='funilrys', author_email='[email protected]', license='GPL-3.0 https://opensource.org/licenses/GPL-3.0', // ... modified code ... url='https://github.com/funilrys/A-John-Shots', platforms=['any'], packages=['a_john_shots'], keywords=['Python', 'JSON', 'SHA-1', 'SHA-512', 'SHA-224', 'SHA-384', 'SHA', 'MD5'], classifiers=[ 'Environment :: Console', 'Topic :: Software Development', // ... rest of the code ...
2a3b2c886657083ebde55ff6b38459a08cfd43b8
service/src/main/java/com/epam/rft/atsy/service/passwordchange/validation/impl/PasswordAllFieldFilledRule.java
service/src/main/java/com/epam/rft/atsy/service/passwordchange/validation/impl/PasswordAllFieldFilledRule.java
package com.epam.rft.atsy.service.passwordchange.validation.impl; import com.epam.rft.atsy.service.domain.PasswordChangeDTO; import com.epam.rft.atsy.service.passwordchange.validation.PasswordValidationRule; import org.apache.commons.lang3.StringUtils; public class PasswordAllFieldFilledRule implements PasswordValidationRule { private static final String MESSAGE_KEY = "passwordchange.validation.allfieldfilled"; @Override public boolean isValid(PasswordChangeDTO passwordChangeDTO) { String newPassword = passwordChangeDTO.getNewPassword(); return StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(newPassword); } @Override public String getErrorMessageKey() { return MESSAGE_KEY; } }
package com.epam.rft.atsy.service.passwordchange.validation.impl; import com.epam.rft.atsy.service.domain.PasswordChangeDTO; import com.epam.rft.atsy.service.passwordchange.validation.PasswordValidationRule; import org.apache.commons.lang3.StringUtils; public class PasswordAllFieldFilledRule implements PasswordValidationRule { private static final String MESSAGE_KEY = "passwordchange.validation.allfieldfilled"; @Override public boolean isValid(PasswordChangeDTO passwordChangeDTO) { String newPassword = passwordChangeDTO.getNewPassword(); String oldPassword = passwordChangeDTO.getOldPassword(); String newPasswordConfirm = passwordChangeDTO.getNewPasswordConfirm(); return StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(oldPassword) && StringUtils.isNotBlank(newPasswordConfirm); } @Override public String getErrorMessageKey() { return MESSAGE_KEY; } }
Fix isValid return statement logic
Fix isValid return statement logic
Java
apache-2.0
epam-debrecen-rft-2015/atsy,epam-debrecen-rft-2015/atsy,epam-debrecen-rft-2015/atsy
java
## Code Before: package com.epam.rft.atsy.service.passwordchange.validation.impl; import com.epam.rft.atsy.service.domain.PasswordChangeDTO; import com.epam.rft.atsy.service.passwordchange.validation.PasswordValidationRule; import org.apache.commons.lang3.StringUtils; public class PasswordAllFieldFilledRule implements PasswordValidationRule { private static final String MESSAGE_KEY = "passwordchange.validation.allfieldfilled"; @Override public boolean isValid(PasswordChangeDTO passwordChangeDTO) { String newPassword = passwordChangeDTO.getNewPassword(); return StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(newPassword); } @Override public String getErrorMessageKey() { return MESSAGE_KEY; } } ## Instruction: Fix isValid return statement logic ## Code After: package com.epam.rft.atsy.service.passwordchange.validation.impl; import com.epam.rft.atsy.service.domain.PasswordChangeDTO; import com.epam.rft.atsy.service.passwordchange.validation.PasswordValidationRule; import org.apache.commons.lang3.StringUtils; public class PasswordAllFieldFilledRule implements PasswordValidationRule { private static final String MESSAGE_KEY = "passwordchange.validation.allfieldfilled"; @Override public boolean isValid(PasswordChangeDTO passwordChangeDTO) { String newPassword = passwordChangeDTO.getNewPassword(); String oldPassword = passwordChangeDTO.getOldPassword(); String newPasswordConfirm = passwordChangeDTO.getNewPasswordConfirm(); return StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(oldPassword) && StringUtils.isNotBlank(newPasswordConfirm); } @Override public String getErrorMessageKey() { return MESSAGE_KEY; } }
... @Override public boolean isValid(PasswordChangeDTO passwordChangeDTO) { String newPassword = passwordChangeDTO.getNewPassword(); String oldPassword = passwordChangeDTO.getOldPassword(); String newPasswordConfirm = passwordChangeDTO.getNewPasswordConfirm(); return StringUtils.isNotBlank(newPassword) && StringUtils.isNotBlank(oldPassword) && StringUtils.isNotBlank(newPasswordConfirm); } @Override ...
6fd857752a6a9e91c29265bd95bce2c793798e60
SeleniumGridExtras/src/test/java/com/groupon/seleniumgridextras/utilities/HttpUtilityTest.java
SeleniumGridExtras/src/test/java/com/groupon/seleniumgridextras/utilities/HttpUtilityTest.java
package com.groupon.seleniumgridextras.utilities; import org.junit.Test; import java.net.ConnectException; import java.net.URL; import java.net.UnknownHostException; import static org.junit.Assert.assertEquals; /** * Created with IntelliJ IDEA. User: dima Date: 7/8/14 Time: 4:09 PM To change this template use * File | Settings | File Templates. */ public class HttpUtilityTest { @Test(expected=ConnectException.class) public void testConnectionRefusedError() throws Exception { HttpUtility.getRequest(new URL("http://localhost:9999")).getResponseCode(); } @Test public void test404Page() throws Exception { assertEquals(404, HttpUtility.getRequest(new URL("http://xkcd.com/404")).getResponseCode()); } @Test public void test200Page() throws Exception { assertEquals(200, HttpUtility.getRequest(new URL("http://google.com")).getResponseCode()); } @Test(expected = UnknownHostException.class) public void testUnknownHost() throws Exception { HttpUtility.getRequest(new URL("http://googasdfasfdkjashfdkjahsfdle.com/")).getResponseCode(); } @Test public void testGetAsString() throws Exception{ assertEquals("", HttpUtility.getRequestAsString(new URL("http://xkcd.com/404"))); } }
package com.groupon.seleniumgridextras.utilities; import org.junit.Test; import java.net.ConnectException; import java.net.ServerSocket; import java.net.URL; import java.net.UnknownHostException; import static org.junit.Assert.assertEquals; public class HttpUtilityTest { @Test(expected = ConnectException.class) public void testConnectionRefusedError() throws Exception { ServerSocket serverSocket = new ServerSocket(0); int port = serverSocket.getLocalPort(); serverSocket .close(); //Find a garanteed open port by taking one and closing. Why doesn't Java allow me to get a list of open ports? HttpUtility.getRequest(new URL("http://localhost:" + port)).getResponseCode(); } @Test public void test404Page() throws Exception { assertEquals(404, HttpUtility.getRequest(new URL("http://xkcd.com/404")).getResponseCode()); } @Test public void test200Page() throws Exception { assertEquals(200, HttpUtility.getRequest(new URL("http://google.com")).getResponseCode()); } @Test(expected = UnknownHostException.class) public void testUnknownHost() throws Exception { HttpUtility.getRequest(new URL("http://googasdfasfdkjashfdkjahsfdle.com/")).getResponseCode(); } @Test public void testGetAsString() throws Exception { assertEquals("", HttpUtility.getRequestAsString(new URL("http://xkcd.com/404"))); } }
Fix an issue where port 9999 might be occupied, causing the test to fail. Now the test finds a guaranteed empty port.
Fix an issue where port 9999 might be occupied, causing the test to fail. Now the test finds a guaranteed empty port.
Java
bsd-3-clause
andrewmkrug/Selenium-Grid-Extras,smccarthy/Selenium-Grid-Extras,smccarthy/Selenium-Grid-Extras,smccarthy/Selenium-Grid-Extras,alexkogon/Selenium-Grid-Extras,groupon/Selenium-Grid-Extras,groupon/Selenium-Grid-Extras,alexkogon/Selenium-Grid-Extras,jivesoftware/Selenium-Grid-Extras,groupon/Selenium-Grid-Extras,alexkogon/Selenium-Grid-Extras,rgonalo/Selenium-Grid-Extras,jivesoftware/Selenium-Grid-Extras,rgonalo/Selenium-Grid-Extras,groupon/Selenium-Grid-Extras,jivesoftware/Selenium-Grid-Extras,rgonalo/Selenium-Grid-Extras,alexkogon/Selenium-Grid-Extras,jivesoftware/Selenium-Grid-Extras,smccarthy/Selenium-Grid-Extras,jivesoftware/Selenium-Grid-Extras,rgonalo/Selenium-Grid-Extras,alexkogon/Selenium-Grid-Extras,andrewmkrug/Selenium-Grid-Extras,andrewmkrug/Selenium-Grid-Extras,groupon/Selenium-Grid-Extras,andrewmkrug/Selenium-Grid-Extras,andrewmkrug/Selenium-Grid-Extras,smccarthy/Selenium-Grid-Extras,rgonalo/Selenium-Grid-Extras
java
## Code Before: package com.groupon.seleniumgridextras.utilities; import org.junit.Test; import java.net.ConnectException; import java.net.URL; import java.net.UnknownHostException; import static org.junit.Assert.assertEquals; /** * Created with IntelliJ IDEA. User: dima Date: 7/8/14 Time: 4:09 PM To change this template use * File | Settings | File Templates. */ public class HttpUtilityTest { @Test(expected=ConnectException.class) public void testConnectionRefusedError() throws Exception { HttpUtility.getRequest(new URL("http://localhost:9999")).getResponseCode(); } @Test public void test404Page() throws Exception { assertEquals(404, HttpUtility.getRequest(new URL("http://xkcd.com/404")).getResponseCode()); } @Test public void test200Page() throws Exception { assertEquals(200, HttpUtility.getRequest(new URL("http://google.com")).getResponseCode()); } @Test(expected = UnknownHostException.class) public void testUnknownHost() throws Exception { HttpUtility.getRequest(new URL("http://googasdfasfdkjashfdkjahsfdle.com/")).getResponseCode(); } @Test public void testGetAsString() throws Exception{ assertEquals("", HttpUtility.getRequestAsString(new URL("http://xkcd.com/404"))); } } ## Instruction: Fix an issue where the port 9999 might be occupied thus the test fails. Now the test finds a garanteed emtpy port. ## Code After: package com.groupon.seleniumgridextras.utilities; import org.junit.Test; import java.net.ConnectException; import java.net.ServerSocket; import java.net.URL; import java.net.UnknownHostException; import static org.junit.Assert.assertEquals; public class HttpUtilityTest { @Test(expected = ConnectException.class) public void testConnectionRefusedError() throws Exception { ServerSocket serverSocket = new ServerSocket(0); int port = serverSocket.getLocalPort(); serverSocket .close(); //Find a garanteed open port by taking one and closing. Why doesn't Java allow me to get a list of open ports? HttpUtility.getRequest(new URL("http://localhost:" + port)).getResponseCode(); } @Test public void test404Page() throws Exception { assertEquals(404, HttpUtility.getRequest(new URL("http://xkcd.com/404")).getResponseCode()); } @Test public void test200Page() throws Exception { assertEquals(200, HttpUtility.getRequest(new URL("http://google.com")).getResponseCode()); } @Test(expected = UnknownHostException.class) public void testUnknownHost() throws Exception { HttpUtility.getRequest(new URL("http://googasdfasfdkjashfdkjahsfdle.com/")).getResponseCode(); } @Test public void testGetAsString() throws Exception { assertEquals("", HttpUtility.getRequestAsString(new URL("http://xkcd.com/404"))); } }
... import org.junit.Test; import java.net.ConnectException; import java.net.ServerSocket; import java.net.URL; import java.net.UnknownHostException; import static org.junit.Assert.assertEquals; public class HttpUtilityTest { @Test(expected = ConnectException.class) public void testConnectionRefusedError() throws Exception { ServerSocket serverSocket = new ServerSocket(0); int port = serverSocket.getLocalPort(); serverSocket .close(); //Find a garanteed open port by taking one and closing. Why doesn't Java allow me to get a list of open ports? HttpUtility.getRequest(new URL("http://localhost:" + port)).getResponseCode(); } @Test ... } @Test public void testGetAsString() throws Exception { assertEquals("", HttpUtility.getRequestAsString(new URL("http://xkcd.com/404"))); } } ...
cba49af7fce05eb22fda3012f23c8fa8736fd022
polling_stations/apps/pollingstations/migrations/0009_customfinder.py
polling_stations/apps/pollingstations/migrations/0009_customfinder.py
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('pollingstations', '0006_residentialaddress_slug'), ] operations = [ migrations.CreateModel( name='CustomFinder', fields=[ ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)), ('base_url', models.CharField(max_length=255, blank=True)), ('can_pass_postcode', models.BooleanField(default=False)), ('message', models.TextField(blank=True)), ], ), ]
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('pollingstations', '0008_auto_20160415_1854'), ] operations = [ migrations.CreateModel( name='CustomFinder', fields=[ ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)), ('base_url', models.CharField(max_length=255, blank=True)), ('can_pass_postcode', models.BooleanField(default=False)), ('message', models.TextField(blank=True)), ], ), ]
Edit migration so it depends on 0008_auto_20160415_1854
Edit migration so it depends on 0008_auto_20160415_1854 Ensure the migrations will apply correctly without conflict once merged. Merging this branch is now blocked on PR #239
Python
bsd-3-clause
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
python
## Code Before: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('pollingstations', '0006_residentialaddress_slug'), ] operations = [ migrations.CreateModel( name='CustomFinder', fields=[ ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)), ('base_url', models.CharField(max_length=255, blank=True)), ('can_pass_postcode', models.BooleanField(default=False)), ('message', models.TextField(blank=True)), ], ), ] ## Instruction: Edit migration so it depends on 0008_auto_20160415_1854 Ensure the migrations will apply correctly without conflcit once merged Merging this branch is now blocked on PR #239 ## Code After: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('pollingstations', '0008_auto_20160415_1854'), ] operations = [ migrations.CreateModel( name='CustomFinder', fields=[ ('area_code', models.CharField(serialize=False, max_length=9, primary_key=True)), ('base_url', models.CharField(max_length=255, blank=True)), ('can_pass_postcode', models.BooleanField(default=False)), ('message', models.TextField(blank=True)), ], ), ]
... class Migration(migrations.Migration): dependencies = [ ('pollingstations', '0008_auto_20160415_1854'), ] operations = [ ...
4a6aaf8f2fec2dd9025030102851408ac993fa7d
client-common/src/main/java/org/realityforge/replicant/client/AggregateEntityLocator.java
client-common/src/main/java/org/realityforge/replicant/client/AggregateEntityLocator.java
package org.realityforge.replicant.client; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; import static org.realityforge.braincheck.Guards.*; /** * A basic EntityLocator implementation that allows explicit per-type registration */ public class AggregateEntityLocator implements EntityLocator { private final ArrayList<EntityLocator> _entityLocators = new ArrayList<>(); protected final <T> void registerEntityLocator( @Nonnull final EntityLocator entityLocator ) { apiInvariant( () -> !_entityLocators.contains( entityLocator ), () -> "Attempting to register entityLocator " + entityLocator + " when already present." ); _entityLocators.add( entityLocator ); } /** * {@inheritDoc} */ @Nullable @Override public final <T> T findByID( @Nonnull final Class<T> type, @Nonnull final Object id ) { for ( final EntityLocator entityLocator : _entityLocators ) { final T entity = entityLocator.findByID( type, id ); if ( null != entity ) { return entity; } } return null; } /** * {@inheritDoc} */ @Nonnull @Override public <T> List<T> findAll( @Nonnull final Class<T> type ) { final ArrayList<T> results = new ArrayList<>(); for ( final EntityLocator entityLocator : _entityLocators ) { results.addAll( entityLocator.findAll( type ) ); } return results; } }
package org.realityforge.replicant.client; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; import static org.realityforge.braincheck.Guards.*; /** * A basic EntityLocator implementation that allows explicit per-type registration */ public class AggregateEntityLocator implements EntityLocator { private final ArrayList<EntityLocator> _entityLocators = new ArrayList<>(); public AggregateEntityLocator( @Nonnull final EntityLocator... entityLocator ) { for ( final EntityLocator locator : entityLocator ) { registerEntityLocator( locator ); } } protected final <T> void registerEntityLocator( @Nonnull final EntityLocator entityLocator ) { apiInvariant( () -> !_entityLocators.contains( entityLocator ), () -> "Attempting to register entityLocator " + entityLocator + " when already present." ); _entityLocators.add( entityLocator ); } /** * {@inheritDoc} */ @Nullable @Override public final <T> T findByID( @Nonnull final Class<T> type, @Nonnull final Object id ) { for ( final EntityLocator entityLocator : _entityLocators ) { final T entity = entityLocator.findByID( type, id ); if ( null != entity ) { return entity; } } return null; } /** * {@inheritDoc} */ @Nonnull @Override public <T> List<T> findAll( @Nonnull final Class<T> type ) { final ArrayList<T> results = new ArrayList<>(); for ( final EntityLocator entityLocator : _entityLocators ) { results.addAll( entityLocator.findAll( type ) ); } return results; } }
Make it possible to register the locators via the constructor
Make it possible to register the locators via the constructor
Java
apache-2.0
realityforge/replicant,realityforge/replicant
java
## Code Before: package org.realityforge.replicant.client; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; import static org.realityforge.braincheck.Guards.*; /** * A basic EntityLocator implementation that allows explicit per-type registration */ public class AggregateEntityLocator implements EntityLocator { private final ArrayList<EntityLocator> _entityLocators = new ArrayList<>(); protected final <T> void registerEntityLocator( @Nonnull final EntityLocator entityLocator ) { apiInvariant( () -> !_entityLocators.contains( entityLocator ), () -> "Attempting to register entityLocator " + entityLocator + " when already present." ); _entityLocators.add( entityLocator ); } /** * {@inheritDoc} */ @Nullable @Override public final <T> T findByID( @Nonnull final Class<T> type, @Nonnull final Object id ) { for ( final EntityLocator entityLocator : _entityLocators ) { final T entity = entityLocator.findByID( type, id ); if ( null != entity ) { return entity; } } return null; } /** * {@inheritDoc} */ @Nonnull @Override public <T> List<T> findAll( @Nonnull final Class<T> type ) { final ArrayList<T> results = new ArrayList<>(); for ( final EntityLocator entityLocator : _entityLocators ) { results.addAll( entityLocator.findAll( type ) ); } return results; } } ## Instruction: Make it possible to register the locators via the constructor ## Code After: package org.realityforge.replicant.client; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; import static org.realityforge.braincheck.Guards.*; /** * A basic EntityLocator implementation that allows explicit per-type registration */ public class AggregateEntityLocator implements EntityLocator { private final ArrayList<EntityLocator> _entityLocators = new ArrayList<>(); public AggregateEntityLocator( @Nonnull final EntityLocator... entityLocator ) { for ( final EntityLocator locator : entityLocator ) { registerEntityLocator( locator ); } } protected final <T> void registerEntityLocator( @Nonnull final EntityLocator entityLocator ) { apiInvariant( () -> !_entityLocators.contains( entityLocator ), () -> "Attempting to register entityLocator " + entityLocator + " when already present." ); _entityLocators.add( entityLocator ); } /** * {@inheritDoc} */ @Nullable @Override public final <T> T findByID( @Nonnull final Class<T> type, @Nonnull final Object id ) { for ( final EntityLocator entityLocator : _entityLocators ) { final T entity = entityLocator.findByID( type, id ); if ( null != entity ) { return entity; } } return null; } /** * {@inheritDoc} */ @Nonnull @Override public <T> List<T> findAll( @Nonnull final Class<T> type ) { final ArrayList<T> results = new ArrayList<>(); for ( final EntityLocator entityLocator : _entityLocators ) { results.addAll( entityLocator.findAll( type ) ); } return results; } }
// ... existing code ... implements EntityLocator { private final ArrayList<EntityLocator> _entityLocators = new ArrayList<>(); public AggregateEntityLocator( @Nonnull final EntityLocator... entityLocator ) { for ( final EntityLocator locator : entityLocator ) { registerEntityLocator( locator ); } } protected final <T> void registerEntityLocator( @Nonnull final EntityLocator entityLocator ) { // ... rest of the code ...
88238b76084bb7d7e5f2c54a0b4fc56b446af1c4
src/page_freelist.c
src/page_freelist.c
void redislite_free_freelist(void *_db, void *_page) { redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_free(page); } void redislite_write_freelist(void *_db, unsigned char *data, void *_page) { redislite *db = (redislite*)_db; redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; data[0] = REDISLITE_PAGE_TYPE_FREELIST; redislite_put_4bytes(&data[1], 0); // reserverd redislite_put_4bytes(&data[5], page->right_page); int size = db->page_size-9; memset(&data[9], 0, size); } void *redislite_read_freelist(void *_db, unsigned char *data) { redislite_page_string* page = redislite_malloc(sizeof(redislite_page_string)); page->right_page = redislite_get_4bytes(&data[5]); return page; }
void redislite_free_freelist(void *_db, void *_page) { redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_free(page); } void redislite_write_freelist(void *_db, unsigned char *data, void *_page) { redislite *db = (redislite*)_db; redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_put_4bytes(&data[0], 0); // reserverd redislite_put_4bytes(&data[4], page->right_page); int size = db->page_size-8; memset(&data[8], 0, size); } void *redislite_read_freelist(void *_db, unsigned char *data) { redislite_page_string* page = redislite_malloc(sizeof(redislite_page_string)); page->right_page = redislite_get_4bytes(&data[8]); return page; }
Remove unused byte in freelist
Remove unused byte in freelist
C
bsd-2-clause
pombredanne/redislite,seppo0010/redislite,pombredanne/redislite,seppo0010/redislite
c
## Code Before: void redislite_free_freelist(void *_db, void *_page) { redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_free(page); } void redislite_write_freelist(void *_db, unsigned char *data, void *_page) { redislite *db = (redislite*)_db; redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; data[0] = REDISLITE_PAGE_TYPE_FREELIST; redislite_put_4bytes(&data[1], 0); // reserverd redislite_put_4bytes(&data[5], page->right_page); int size = db->page_size-9; memset(&data[9], 0, size); } void *redislite_read_freelist(void *_db, unsigned char *data) { redislite_page_string* page = redislite_malloc(sizeof(redislite_page_string)); page->right_page = redislite_get_4bytes(&data[5]); return page; } ## Instruction: Remove unused byte in freelist ## Code After: void redislite_free_freelist(void *_db, void *_page) { redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_free(page); } void redislite_write_freelist(void *_db, unsigned char *data, void *_page) { redislite *db = (redislite*)_db; redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_put_4bytes(&data[0], 0); // reserverd redislite_put_4bytes(&data[4], page->right_page); int size = db->page_size-8; memset(&data[8], 0, size); } void *redislite_read_freelist(void *_db, unsigned char *data) { redislite_page_string* page = redislite_malloc(sizeof(redislite_page_string)); page->right_page = redislite_get_4bytes(&data[8]); return page; }
... redislite_page_string* page = (redislite_page_string*)_page; if (page == NULL) return; redislite_put_4bytes(&data[0], 0); // reserverd redislite_put_4bytes(&data[4], page->right_page); int size = db->page_size-8; memset(&data[8], 0, size); } void *redislite_read_freelist(void *_db, unsigned char *data) { redislite_page_string* page = redislite_malloc(sizeof(redislite_page_string)); page->right_page = redislite_get_4bytes(&data[8]); return page; } ...
b1168ed34505c8bfa20e6679c06cc9fd0ae559d1
setup.py
setup.py
from setuptools import setup from fdep import __VERSION__ try: ldsc = open("README.md").read() except: ldsc = "" setup( name="fdep", packages=['fdep'], version=__VERSION__, author="Checkr", author_email="[email protected]", url="http://github.com/checkr/fdep", license="MIT LICENSE", description="Fdep is a simple, easy-to-use, production-ready tool/library written in Python to download datasets, misc. files for your machine learning projects.", long_description=ldsc, entry_points={ 'console_scripts': [ 'fdep = fdep.__main__:main' ] }, install_requires=[ 'PyYAML==3.12', 'boto3==1.4.0', 'requests==2.11.1', 'colorama==0.3.7', 'tqdm==4.8.4' ] )
from setuptools import setup from fdep import __VERSION__ try: ldsc = open("README.md").read() except: ldsc = "" setup( name="fdep", packages=['fdep'], version=__VERSION__, author="Checkr", author_email="[email protected]", url="http://github.com/checkr/fdep", license="MIT LICENSE", description="Fdep is a simple, easy-to-use, production-ready tool/library written in Python to download datasets, misc. files for your machine learning projects.", long_description=ldsc, entry_points={ 'console_scripts': ['fdep=fdep.__main__:main'] }, install_requires=[ 'PyYAML==3.12', 'boto3==1.4.0', 'requests==2.11.1', 'colorama==0.3.7', 'tqdm==4.8.4' ] )
Clean up a little bit
Clean up a little bit
Python
mit
checkr/fdep
python
## Code Before: from setuptools import setup from fdep import __VERSION__ try: ldsc = open("README.md").read() except: ldsc = "" setup( name="fdep", packages=['fdep'], version=__VERSION__, author="Checkr", author_email="[email protected]", url="http://github.com/checkr/fdep", license="MIT LICENSE", description="Fdep is a simple, easy-to-use, production-ready tool/library written in Python to download datasets, misc. files for your machine learning projects.", long_description=ldsc, entry_points={ 'console_scripts': [ 'fdep = fdep.__main__:main' ] }, install_requires=[ 'PyYAML==3.12', 'boto3==1.4.0', 'requests==2.11.1', 'colorama==0.3.7', 'tqdm==4.8.4' ] ) ## Instruction: Clean up a little bit ## Code After: from setuptools import setup from fdep import __VERSION__ try: ldsc = open("README.md").read() except: ldsc = "" setup( name="fdep", packages=['fdep'], version=__VERSION__, author="Checkr", author_email="[email protected]", url="http://github.com/checkr/fdep", license="MIT LICENSE", description="Fdep is a simple, easy-to-use, production-ready tool/library written in Python to download datasets, misc. files for your machine learning projects.", long_description=ldsc, entry_points={ 'console_scripts': ['fdep=fdep.__main__:main'] }, install_requires=[ 'PyYAML==3.12', 'boto3==1.4.0', 'requests==2.11.1', 'colorama==0.3.7', 'tqdm==4.8.4' ] )
# ... existing code ... description="Fdep is a simple, easy-to-use, production-ready tool/library written in Python to download datasets, misc. files for your machine learning projects.", long_description=ldsc, entry_points={ 'console_scripts': ['fdep=fdep.__main__:main'] }, install_requires=[ 'PyYAML==3.12', # ... rest of the code ...
1318b5497c51e10e196ff7b4f4093c4710b99831
backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/action/AttachNetworkToVdsGroupParameter.java
backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/action/AttachNetworkToVdsGroupParameter.java
package org.ovirt.engine.core.common.action; import org.ovirt.engine.core.common.businessentities.Network; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.network_cluster; public class AttachNetworkToVdsGroupParameter extends NetworkClusterParameters { private static final long serialVersionUID = -2874549285727269806L; private Network _network; public AttachNetworkToVdsGroupParameter(VDSGroup group, Network net) { super(new network_cluster(group.getId(), net.getId(), null, // Cluster attachment data can sometimes be missing, so use defaults in that case. net.getCluster() == null ? false : net.getCluster().getis_display(), net.getCluster() == null ? true : net.getCluster().isRequired())); _network = net; } public Network getNetwork() { return _network; } public AttachNetworkToVdsGroupParameter() { } }
package org.ovirt.engine.core.common.action; import org.ovirt.engine.core.common.businessentities.Network; import org.ovirt.engine.core.common.businessentities.NetworkStatus; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.network_cluster; public class AttachNetworkToVdsGroupParameter extends NetworkClusterParameters { private static final long serialVersionUID = -2874549285727269806L; private Network _network; public AttachNetworkToVdsGroupParameter(VDSGroup group, Network net) { super(new network_cluster(group.getId(), net.getId(), NetworkStatus.NonOperational, // Cluster attachment data can sometimes be missing, so use defaults in that case. net.getCluster() == null ? false : net.getCluster().getis_display(), net.getCluster() == null ? true : net.getCluster().isRequired())); _network = net; } public Network getNetwork() { return _network; } public AttachNetworkToVdsGroupParameter() { } }
Fix default network status on attach
core: Fix default network status on attach Network status is being saved to DB & later on updated, so need to save some default value and NonOperational is a reasonable choice. Change-Id: I17ea1594fe54203ce5d98fedd77dade64537f57c Signed-off-by: Mike Kolesnik <[email protected]>
Java
apache-2.0
yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,OpenUniversity/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,OpenUniversity/ovirt-engine,halober/ovirt-engine
java
## Code Before: package org.ovirt.engine.core.common.action; import org.ovirt.engine.core.common.businessentities.Network; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.network_cluster; public class AttachNetworkToVdsGroupParameter extends NetworkClusterParameters { private static final long serialVersionUID = -2874549285727269806L; private Network _network; public AttachNetworkToVdsGroupParameter(VDSGroup group, Network net) { super(new network_cluster(group.getId(), net.getId(), null, // Cluster attachment data can sometimes be missing, so use defaults in that case. net.getCluster() == null ? false : net.getCluster().getis_display(), net.getCluster() == null ? true : net.getCluster().isRequired())); _network = net; } public Network getNetwork() { return _network; } public AttachNetworkToVdsGroupParameter() { } } ## Instruction: core: Fix default network status on attach Network status is being saved to DB & later on updated, so need to save some default value and NonOperational is a reasonable choice. Change-Id: I17ea1594fe54203ce5d98fedd77dade64537f57c Signed-off-by: Mike Kolesnik <[email protected]> ## Code After: package org.ovirt.engine.core.common.action; import org.ovirt.engine.core.common.businessentities.Network; import org.ovirt.engine.core.common.businessentities.NetworkStatus; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.network_cluster; public class AttachNetworkToVdsGroupParameter extends NetworkClusterParameters { private static final long serialVersionUID = -2874549285727269806L; private Network _network; public AttachNetworkToVdsGroupParameter(VDSGroup group, Network net) { super(new network_cluster(group.getId(), net.getId(), NetworkStatus.NonOperational, // Cluster attachment data can sometimes be missing, so use defaults in that case. net.getCluster() == null ? false : net.getCluster().getis_display(), net.getCluster() == null ? true : net.getCluster().isRequired())); _network = net; } public Network getNetwork() { return _network; } public AttachNetworkToVdsGroupParameter() { } }
# ... existing code ... package org.ovirt.engine.core.common.action; import org.ovirt.engine.core.common.businessentities.Network; import org.ovirt.engine.core.common.businessentities.NetworkStatus; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.network_cluster; # ... modified code ... public AttachNetworkToVdsGroupParameter(VDSGroup group, Network net) { super(new network_cluster(group.getId(), net.getId(), NetworkStatus.NonOperational, // Cluster attachment data can sometimes be missing, so use defaults in that case. net.getCluster() == null ? false : net.getCluster().getis_display(), # ... rest of the code ...
927e2bc4b8774cdffd753f99f63086baf437a509
setup.py
setup.py
from pip.download import PipSession from pip.req import parse_requirements from setuptools import setup setup( name='aws-portknock', version='0.1', py_modules=['aws_portknock'], description='Port knocking for AWS security groups', author='Michel Alexandre Salim', author_email='[email protected]', url='https://github.com/michel-slm/aws-portknock', download_url='https://github.com/michel-slm/aws-portknock/tarball/0.1', keywords=['aws'], classifiers=[], install_requires=[str(r.req) for r in parse_requirements( 'requirements.txt', session=PipSession())], entry_points=''' [console_scripts] aws-portknock=aws_portknock:cli ''', )
import os from pip.download import PipSession from pip.req import parse_requirements from setuptools import setup BASE_DIR = os.path.dirname(os.path.realpath(__file__)) reqs_file = os.path.join(BASE_DIR, 'requirements.txt') install_reqs = parse_requirements(reqs_file, session=PipSession()) setup( name='aws-portknock', version='0.1', py_modules=['aws_portknock'], description='Port knocking for AWS security groups', author='Michel Alexandre Salim', author_email='[email protected]', url='https://github.com/michel-slm/aws-portknock', download_url='https://github.com/michel-slm/aws-portknock/tarball/0.1', keywords=['aws'], classifiers=[], install_requires=[str(r.req) for r in install_reqs], entry_points=''' [console_scripts] aws-portknock=aws_portknock:cli ''', )
Add path to requirements.txt so installation from pip succeeds
Add path to requirements.txt so installation from pip succeeds cf http://lorenamesa.com/packaging-my-first-python-egg.html
Python
mpl-2.0
michel-slm/aws-portknock
python
## Code Before: from pip.download import PipSession from pip.req import parse_requirements from setuptools import setup setup( name='aws-portknock', version='0.1', py_modules=['aws_portknock'], description='Port knocking for AWS security groups', author='Michel Alexandre Salim', author_email='[email protected]', url='https://github.com/michel-slm/aws-portknock', download_url='https://github.com/michel-slm/aws-portknock/tarball/0.1', keywords=['aws'], classifiers=[], install_requires=[str(r.req) for r in parse_requirements( 'requirements.txt', session=PipSession())], entry_points=''' [console_scripts] aws-portknock=aws_portknock:cli ''', ) ## Instruction: Add path to requirements.txt so installation from pip succeeds cf http://lorenamesa.com/packaging-my-first-python-egg.html ## Code After: import os from pip.download import PipSession from pip.req import parse_requirements from setuptools import setup BASE_DIR = os.path.dirname(os.path.realpath(__file__)) reqs_file = os.path.join(BASE_DIR, 'requirements.txt') install_reqs = parse_requirements(reqs_file, session=PipSession()) setup( name='aws-portknock', version='0.1', py_modules=['aws_portknock'], description='Port knocking for AWS security groups', author='Michel Alexandre Salim', author_email='[email protected]', url='https://github.com/michel-slm/aws-portknock', download_url='https://github.com/michel-slm/aws-portknock/tarball/0.1', keywords=['aws'], classifiers=[], install_requires=[str(r.req) for r in install_reqs], entry_points=''' [console_scripts] aws-portknock=aws_portknock:cli ''', )
# ... existing code ... import os from pip.download import PipSession from pip.req import parse_requirements from setuptools import setup BASE_DIR = os.path.dirname(os.path.realpath(__file__)) reqs_file = os.path.join(BASE_DIR, 'requirements.txt') install_reqs = parse_requirements(reqs_file, session=PipSession()) setup( name='aws-portknock', # ... modified code ... keywords=['aws'], classifiers=[], install_requires=[str(r.req) for r in install_reqs], entry_points=''' [console_scripts] aws-portknock=aws_portknock:cli # ... rest of the code ...
661d58c993cf958cb447f11dfba016d9436be14b
src/main/java/org/testng/internal/Version.java
src/main/java/org/testng/internal/Version.java
package org.testng.internal; public class Version { public static final String VERSION = "7.1.1-SNAPSHOT"; public static String getVersionString() { return VERSION; } public static void displayBanner() { System.out.println("...\n... TestNG " + getVersionString() + " by Cédric Beust ([email protected])\n...\n"); } }
package org.testng.internal; public class Version { public static final String VERSION = "7.0.1"; public static String getVersionString() { return VERSION; } public static void displayBanner() { System.out.println("...\n... TestNG " + getVersionString() + " by Cédric Beust ([email protected])\n...\n"); } }
Undo alteration of generated file
Undo alteration of generated file
Java
apache-2.0
cbeust/testng,cbeust/testng,krmahadevan/testng,cbeust/testng,cbeust/testng,krmahadevan/testng,krmahadevan/testng,krmahadevan/testng,cbeust/testng,krmahadevan/testng
java
## Code Before: package org.testng.internal; public class Version { public static final String VERSION = "7.1.1-SNAPSHOT"; public static String getVersionString() { return VERSION; } public static void displayBanner() { System.out.println("...\n... TestNG " + getVersionString() + " by Cédric Beust ([email protected])\n...\n"); } } ## Instruction: Undo alteration of generated file ## Code After: package org.testng.internal; public class Version { public static final String VERSION = "7.0.1"; public static String getVersionString() { return VERSION; } public static void displayBanner() { System.out.println("...\n... TestNG " + getVersionString() + " by Cédric Beust ([email protected])\n...\n"); } }
// ... existing code ... public class Version { public static final String VERSION = "7.0.1"; public static String getVersionString() { return VERSION; // ... rest of the code ...
212df9ba53b0d95c84ad0dde946c7e5bba56410d
src/main/java/net/vexelon/currencybg/srv/reports/TelegramReporter.java
src/main/java/net/vexelon/currencybg/srv/reports/TelegramReporter.java
package net.vexelon.currencybg.srv.reports; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pengrad.telegrambot.Callback; import com.pengrad.telegrambot.TelegramBot; import com.pengrad.telegrambot.TelegramBotAdapter; import com.pengrad.telegrambot.request.SendDocument; import com.pengrad.telegrambot.response.SendResponse; import net.vexelon.currencybg.srv.GlobalConfig; public class TelegramReporter extends AbstractReporter { private static final Logger log = LoggerFactory.getLogger(TelegramReporter.class); public TelegramReporter(String name) { super(name); } public TelegramReporter() { this("Telegram"); } @Override public void send() throws IOException { TelegramBot bot = TelegramBotAdapter .build(/* "264497873:AAGjShbt3-CJkuSHJLwyMccTbvh-0PxNZ68" */GlobalConfig.INSTANCE.getBotToken()); bot.execute(new SendDocument(/* "@ceco88" */GlobalConfig.INSTANCE.getBotChannel(), buffer.toString().getBytes("UTF-8")), new Callback<SendDocument, SendResponse>() { @Override public void onFailure(SendDocument request, IOException e) { log.error("Telegram SendDocument failed!", e); }; @Override public void onResponse(SendDocument request, SendResponse response) { // do nothing }; }); } }
package net.vexelon.currencybg.srv.reports; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pengrad.telegrambot.Callback; import com.pengrad.telegrambot.TelegramBot; import com.pengrad.telegrambot.TelegramBotAdapter; import com.pengrad.telegrambot.request.SendDocument; import com.pengrad.telegrambot.response.SendResponse; import net.vexelon.currencybg.srv.GlobalConfig; public class TelegramReporter extends AbstractReporter { private static final Logger log = LoggerFactory.getLogger(TelegramReporter.class); public TelegramReporter(String name) { super(name); } public TelegramReporter() { this("Telegram"); } @Override public void send() throws IOException { if (!GlobalConfig.INSTANCE.getBotToken().isEmpty() && !GlobalConfig.INSTANCE.getBotChannel().isEmpty()) { TelegramBot bot = TelegramBotAdapter.build(GlobalConfig.INSTANCE.getBotToken()); bot.execute(new SendDocument(/* "@ceco88" */GlobalConfig.INSTANCE.getBotChannel(), buffer.toString().getBytes("UTF-8")), new Callback<SendDocument, SendResponse>() { @Override public void onFailure(SendDocument request, IOException e) { log.error("Telegram SendDocument failed!", e); }; @Override public void onResponse(SendDocument request, SendResponse response) { // do nothing }; }); } } }
Send Telegram msg only if channel and bot properties are set
Send Telegram msg only if channel and bot properties are set
Java
agpl-3.0
vexelon-dot-net/currencybg.server
java
## Code Before: package net.vexelon.currencybg.srv.reports; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pengrad.telegrambot.Callback; import com.pengrad.telegrambot.TelegramBot; import com.pengrad.telegrambot.TelegramBotAdapter; import com.pengrad.telegrambot.request.SendDocument; import com.pengrad.telegrambot.response.SendResponse; import net.vexelon.currencybg.srv.GlobalConfig; public class TelegramReporter extends AbstractReporter { private static final Logger log = LoggerFactory.getLogger(TelegramReporter.class); public TelegramReporter(String name) { super(name); } public TelegramReporter() { this("Telegram"); } @Override public void send() throws IOException { TelegramBot bot = TelegramBotAdapter .build(/* "264497873:AAGjShbt3-CJkuSHJLwyMccTbvh-0PxNZ68" */GlobalConfig.INSTANCE.getBotToken()); bot.execute(new SendDocument(/* "@ceco88" */GlobalConfig.INSTANCE.getBotChannel(), buffer.toString().getBytes("UTF-8")), new Callback<SendDocument, SendResponse>() { @Override public void onFailure(SendDocument request, IOException e) { log.error("Telegram SendDocument failed!", e); }; @Override public void onResponse(SendDocument request, SendResponse response) { // do nothing }; }); } } ## Instruction: Send Telegram msg only if channel and bot properties are set ## Code After: package net.vexelon.currencybg.srv.reports; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.pengrad.telegrambot.Callback; import com.pengrad.telegrambot.TelegramBot; import com.pengrad.telegrambot.TelegramBotAdapter; import com.pengrad.telegrambot.request.SendDocument; import com.pengrad.telegrambot.response.SendResponse; import net.vexelon.currencybg.srv.GlobalConfig; public class TelegramReporter extends AbstractReporter { private static final Logger log = LoggerFactory.getLogger(TelegramReporter.class); public TelegramReporter(String name) { super(name); } public TelegramReporter() { this("Telegram"); } @Override public void send() throws IOException { if (!GlobalConfig.INSTANCE.getBotToken().isEmpty() && !GlobalConfig.INSTANCE.getBotChannel().isEmpty()) { TelegramBot bot = TelegramBotAdapter.build(GlobalConfig.INSTANCE.getBotToken()); bot.execute(new SendDocument(/* "@ceco88" */GlobalConfig.INSTANCE.getBotChannel(), buffer.toString().getBytes("UTF-8")), new Callback<SendDocument, SendResponse>() { @Override public void onFailure(SendDocument request, IOException e) { log.error("Telegram SendDocument failed!", e); }; @Override public void onResponse(SendDocument request, SendResponse response) { // do nothing }; }); } } }
// ... existing code ... @Override public void send() throws IOException { if (!GlobalConfig.INSTANCE.getBotToken().isEmpty() && !GlobalConfig.INSTANCE.getBotChannel().isEmpty()) { TelegramBot bot = TelegramBotAdapter.build(GlobalConfig.INSTANCE.getBotToken()); bot.execute(new SendDocument(/* "@ceco88" */GlobalConfig.INSTANCE.getBotChannel(), buffer.toString().getBytes("UTF-8")), new Callback<SendDocument, SendResponse>() { @Override public void onFailure(SendDocument request, IOException e) { log.error("Telegram SendDocument failed!", e); }; @Override public void onResponse(SendDocument request, SendResponse response) { // do nothing }; }); } } } // ... rest of the code ...
ba4ea2169a13d61d30c94e89db512a34bc0fe3b5
bluesky/tests/test_documents.py
bluesky/tests/test_documents.py
from bluesky.run_engine import RunEngine from bluesky.tests.utils import setup_test_run_engine from bluesky.examples import simple_scan, motor RE = setup_test_run_engine() def test_custom_metadata(): def assert_lion(name, doc): assert 'animal' in doc assert doc['animal'] == 'lion' RE(simple_scan(motor), animal='lion', subs={'start': assert_lion}) # Note: Because assert_lion is processed on the main thread, it can # fail the test. I checked by writing a failing version of it. - D.A.
import pytest import jsonschema from bluesky.run_engine import RunEngine from event_model import DocumentNames, schemas from bluesky.tests.utils import setup_test_run_engine from bluesky.utils import new_uid from bluesky.examples import simple_scan, motor RE = setup_test_run_engine() def test_custom_metadata(): def assert_lion(name, doc): assert 'animal' in doc assert doc['animal'] == 'lion' RE(simple_scan(motor), animal='lion', subs={'start': assert_lion}) # Note: Because assert_lion is processed on the main thread, it can # fail the test. I checked by writing a failing version of it. - D.A. def test_dots_not_allowed_in_keys(): doc = {'time': 0, 'uid': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.start]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.start]) # Now add illegal key. doc.update({'b.': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.start]) doc = {'time': 0, 'uid': new_uid(), 'data_keys': {'a': {'source': '', 'dtype': 'number', 'shape': []}}, 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Now add illegal key. doc.update({'b.c': 'd'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.descriptor]) doc = {'time': 0, 'uid': new_uid(), 'exit_status': 'success', 'reason': '', 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.stop]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.stop]) # Now add illegal key. doc.update({'.b': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.stop])
Test that event_model forbids dots in key names.
TST: Test that event_model forbids dots in key names.
Python
bsd-3-clause
ericdill/bluesky,ericdill/bluesky
python
## Code Before: from bluesky.run_engine import RunEngine from bluesky.tests.utils import setup_test_run_engine from bluesky.examples import simple_scan, motor RE = setup_test_run_engine() def test_custom_metadata(): def assert_lion(name, doc): assert 'animal' in doc assert doc['animal'] == 'lion' RE(simple_scan(motor), animal='lion', subs={'start': assert_lion}) # Note: Because assert_lion is processed on the main thread, it can # fail the test. I checked by writing a failing version of it. - D.A. ## Instruction: TST: Test that event_model forbids dots in key names. ## Code After: import pytest import jsonschema from bluesky.run_engine import RunEngine from event_model import DocumentNames, schemas from bluesky.tests.utils import setup_test_run_engine from bluesky.utils import new_uid from bluesky.examples import simple_scan, motor RE = setup_test_run_engine() def test_custom_metadata(): def assert_lion(name, doc): assert 'animal' in doc assert doc['animal'] == 'lion' RE(simple_scan(motor), animal='lion', subs={'start': assert_lion}) # Note: Because assert_lion is processed on the main thread, it can # fail the test. I checked by writing a failing version of it. - D.A. def test_dots_not_allowed_in_keys(): doc = {'time': 0, 'uid': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.start]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.start]) # Now add illegal key. doc.update({'b.': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.start]) doc = {'time': 0, 'uid': new_uid(), 'data_keys': {'a': {'source': '', 'dtype': 'number', 'shape': []}}, 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Now add illegal key. doc.update({'b.c': 'd'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.descriptor]) doc = {'time': 0, 'uid': new_uid(), 'exit_status': 'success', 'reason': '', 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.stop]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.stop]) # Now add illegal key. doc.update({'.b': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.stop])
# ... existing code ... import pytest import jsonschema from bluesky.run_engine import RunEngine from event_model import DocumentNames, schemas from bluesky.tests.utils import setup_test_run_engine from bluesky.utils import new_uid from bluesky.examples import simple_scan, motor # ... modified code ... RE(simple_scan(motor), animal='lion', subs={'start': assert_lion}) # Note: Because assert_lion is processed on the main thread, it can # fail the test. I checked by writing a failing version of it. - D.A. def test_dots_not_allowed_in_keys(): doc = {'time': 0, 'uid': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.start]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.start]) # Now add illegal key. doc.update({'b.': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.start]) doc = {'time': 0, 'uid': new_uid(), 'data_keys': {'a': {'source': '', 'dtype': 'number', 'shape': []}}, 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.descriptor]) # Now add illegal key. doc.update({'b.c': 'd'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.descriptor]) doc = {'time': 0, 'uid': new_uid(), 'exit_status': 'success', 'reason': '', 'run_start': new_uid()} jsonschema.validate(doc, schemas[DocumentNames.stop]) # Add a legal key. doc.update({'b': 'c'}) jsonschema.validate(doc, schemas[DocumentNames.stop]) # Now add illegal key. doc.update({'.b': 'c'}) with pytest.raises(jsonschema.ValidationError): jsonschema.validate(doc, schemas[DocumentNames.stop]) # ... rest of the code ...
0ee3427092052d963fb836a9f8215a7a4f5d8bfa
wordpress-shortcodes/src/main/java/org/wordpress/aztec/plugins/shortcodes/VideoShortcodePlugin.kt
wordpress-shortcodes/src/main/java/org/wordpress/aztec/plugins/shortcodes/VideoShortcodePlugin.kt
package org.wordpress.aztec.plugins.shortcodes import org.wordpress.aztec.plugins.html2visual.IHtmlPreprocessor import org.wordpress.aztec.plugins.visual2html.IHtmlPostprocessor class VideoShortcodePlugin : IHtmlPreprocessor, IHtmlPostprocessor { private val TAG = "video" override fun processHtmlBeforeParsing(source: String): String { return source.replace(Regex("\\[$TAG([^\\]]*)\\]"), "<$TAG$1 />") } override fun processHtmlAfterSerialization(source: String): String { return source.replace(Regex("<$TAG([^>]*)/?>"), "[$TAG$1]") } }
package org.wordpress.aztec.plugins.shortcodes import org.wordpress.aztec.plugins.html2visual.IHtmlPreprocessor import org.wordpress.aztec.plugins.visual2html.IHtmlPostprocessor class VideoShortcodePlugin : IHtmlPreprocessor, IHtmlPostprocessor { private val TAG = "video" override fun processHtmlBeforeParsing(source: String): String { return source.replace(Regex("\\[$TAG([^\\]]*)\\]"), "<$TAG$1 />") } override fun processHtmlAfterSerialization(source: String): String { return StringBuilder(source) .replace(Regex("<$TAG([^>]*(?<! )) */>"), "[$TAG$1]") .replace(Regex("<$TAG([^>]*(?<! )) *></$TAG>"), "[$TAG$1]") } }
Improve the video tag regex matching
Improve the video tag regex matching
Kotlin
mpl-2.0
wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android,wordpress-mobile/AztecEditor-Android
kotlin
## Code Before: package org.wordpress.aztec.plugins.shortcodes import org.wordpress.aztec.plugins.html2visual.IHtmlPreprocessor import org.wordpress.aztec.plugins.visual2html.IHtmlPostprocessor class VideoShortcodePlugin : IHtmlPreprocessor, IHtmlPostprocessor { private val TAG = "video" override fun processHtmlBeforeParsing(source: String): String { return source.replace(Regex("\\[$TAG([^\\]]*)\\]"), "<$TAG$1 />") } override fun processHtmlAfterSerialization(source: String): String { return source.replace(Regex("<$TAG([^>]*)/?>"), "[$TAG$1]") } } ## Instruction: Improve the video tag regex matching ## Code After: package org.wordpress.aztec.plugins.shortcodes import org.wordpress.aztec.plugins.html2visual.IHtmlPreprocessor import org.wordpress.aztec.plugins.visual2html.IHtmlPostprocessor class VideoShortcodePlugin : IHtmlPreprocessor, IHtmlPostprocessor { private val TAG = "video" override fun processHtmlBeforeParsing(source: String): String { return source.replace(Regex("\\[$TAG([^\\]]*)\\]"), "<$TAG$1 />") } override fun processHtmlAfterSerialization(source: String): String { return StringBuilder(source) .replace(Regex("<$TAG([^>]*(?<! )) */>"), "[$TAG$1]") .replace(Regex("<$TAG([^>]*(?<! )) *></$TAG>"), "[$TAG$1]") } }
// ... existing code ... } override fun processHtmlAfterSerialization(source: String): String { return StringBuilder(source) .replace(Regex("<$TAG([^>]*(?<! )) */>"), "[$TAG$1]") .replace(Regex("<$TAG([^>]*(?<! )) *></$TAG>"), "[$TAG$1]") } } // ... rest of the code ...
81bd740e60ce850d1617d2323b6e65960129ef0f
herana/forms.py
herana/forms.py
from django.contrib.auth.models import User from django import forms from models import ProjectDetail class ProjectDetailForm(forms.ModelForm): class Meta: model = ProjectDetail exclude = ('record_status', 'reporting_period') def _clean_fields(self): # If we are saving a draft, only the header field is required. if self.data['_draft']: for name, field in self.fields.items(): if not name == 'header': field.required = False super(ProjectDetailForm, self)._clean_fields()
from django.contrib.auth.models import User from django import forms from models import ProjectDetail class ProjectDetailForm(forms.ModelForm): class Meta: model = ProjectDetail exclude = ('record_status', 'reporting_period') def _clean_fields(self): # If we are saving a draft, only the header field is required. if '_draft' in self.data: for name, field in self.fields.items(): if not name == 'header': field.required = False super(ProjectDetailForm, self)._clean_fields()
Fix check for _draft key in request object
Fix check for _draft key in request object
Python
mit
Code4SA/herana,Code4SA/herana,Code4SA/herana,Code4SA/herana
python
## Code Before: from django.contrib.auth.models import User from django import forms from models import ProjectDetail class ProjectDetailForm(forms.ModelForm): class Meta: model = ProjectDetail exclude = ('record_status', 'reporting_period') def _clean_fields(self): # If we are saving a draft, only the header field is required. if self.data['_draft']: for name, field in self.fields.items(): if not name == 'header': field.required = False super(ProjectDetailForm, self)._clean_fields() ## Instruction: Fix check for _draft key in request object ## Code After: from django.contrib.auth.models import User from django import forms from models import ProjectDetail class ProjectDetailForm(forms.ModelForm): class Meta: model = ProjectDetail exclude = ('record_status', 'reporting_period') def _clean_fields(self): # If we are saving a draft, only the header field is required. if '_draft' in self.data: for name, field in self.fields.items(): if not name == 'header': field.required = False super(ProjectDetailForm, self)._clean_fields()
... def _clean_fields(self): # If we are saving a draft, only the header field is required. if '_draft' in self.data: for name, field in self.fields.items(): if not name == 'header': field.required = False ...
edb65bb8be45202ec4b1b0dbaeb4cbe0b50e1553
src/modules/comm.c
src/modules/comm.c
static void inbox_received_handler(DictionaryIterator *iter, void *context) { #if defined(PBL_COLOR) Tuple *background_t = dict_find(iter, AppKeyColorBackground); if(background_t) { data_set_color(ColorBackground, (GColor){ .argb = background_t->value->int32 }); } Tuple *sides_t = dict_find(iter, AppKeyColorSides); if(sides_t) { data_set_color(ColorSides, (GColor){ .argb = sides_t->value->int32 }); } Tuple *face_t = dict_find(iter, AppKeyColorFace); if(face_t) { data_set_color(ColorFace, (GColor){ .argb = face_t->value->int32 }); } #endif // Other settings Tuple *anim_t = dict_find(iter, AppKeyAnimations); if(anim_t) { data_set_animations(anim_t->value->int32 == 1); } Tuple *bluetooth_t = dict_find(iter, AppKeyBluetooth); if(bluetooth_t) { data_set_bluetooth_alert(bluetooth_t->value->int32 == 1); } // Quit to be reloaded window_stack_pop_all(true); } void comm_init(int inbox, int outbox) { app_message_register_inbox_received(inbox_received_handler); app_message_open(inbox, outbox); }
static void inbox_received_handler(DictionaryIterator *iter, void *context) { #if defined(PBL_COLOR) Tuple *background_t = dict_find(iter, AppKeyColorBackground); if(background_t) { data_set_color(ColorBackground, (GColor){ .argb = background_t->value->int8 }); } Tuple *sides_t = dict_find(iter, AppKeyColorSides); if(sides_t) { data_set_color(ColorSides, (GColor){ .argb = sides_t->value->int8 }); } Tuple *face_t = dict_find(iter, AppKeyColorFace); if(face_t) { data_set_color(ColorFace, (GColor){ .argb = face_t->value->int8 }); } #endif // Other settings Tuple *anim_t = dict_find(iter, AppKeyAnimations); if(anim_t) { data_set_animations(anim_t->value->int8 == 1); } Tuple *bluetooth_t = dict_find(iter, AppKeyBluetooth); if(bluetooth_t) { data_set_bluetooth_alert(bluetooth_t->value->int8 == 1); } // Quit to be reloaded window_stack_pop_all(true); } void comm_init(int inbox, int outbox) { app_message_register_inbox_received(inbox_received_handler); app_message_open(inbox, outbox); }
Use more reliable union value
Use more reliable union value
C
mit
C-D-Lewis/isotime-appstore,C-D-Lewis/isotime-appstore,C-D-Lewis/isotime-appstore,C-D-Lewis/isotime-appstore
c
## Code Before: static void inbox_received_handler(DictionaryIterator *iter, void *context) { #if defined(PBL_COLOR) Tuple *background_t = dict_find(iter, AppKeyColorBackground); if(background_t) { data_set_color(ColorBackground, (GColor){ .argb = background_t->value->int32 }); } Tuple *sides_t = dict_find(iter, AppKeyColorSides); if(sides_t) { data_set_color(ColorSides, (GColor){ .argb = sides_t->value->int32 }); } Tuple *face_t = dict_find(iter, AppKeyColorFace); if(face_t) { data_set_color(ColorFace, (GColor){ .argb = face_t->value->int32 }); } #endif // Other settings Tuple *anim_t = dict_find(iter, AppKeyAnimations); if(anim_t) { data_set_animations(anim_t->value->int32 == 1); } Tuple *bluetooth_t = dict_find(iter, AppKeyBluetooth); if(bluetooth_t) { data_set_bluetooth_alert(bluetooth_t->value->int32 == 1); } // Quit to be reloaded window_stack_pop_all(true); } void comm_init(int inbox, int outbox) { app_message_register_inbox_received(inbox_received_handler); app_message_open(inbox, outbox); } ## Instruction: Use more reliable union value ## Code After: static void inbox_received_handler(DictionaryIterator *iter, void *context) { #if defined(PBL_COLOR) Tuple *background_t = dict_find(iter, AppKeyColorBackground); if(background_t) { data_set_color(ColorBackground, (GColor){ .argb = background_t->value->int8 }); } Tuple *sides_t = dict_find(iter, AppKeyColorSides); if(sides_t) { data_set_color(ColorSides, (GColor){ .argb = sides_t->value->int8 }); } Tuple *face_t = dict_find(iter, AppKeyColorFace); if(face_t) { data_set_color(ColorFace, (GColor){ .argb = face_t->value->int8 }); } #endif // Other settings Tuple *anim_t = dict_find(iter, AppKeyAnimations); if(anim_t) { data_set_animations(anim_t->value->int8 == 1); } Tuple *bluetooth_t = dict_find(iter, AppKeyBluetooth); if(bluetooth_t) { data_set_bluetooth_alert(bluetooth_t->value->int8 == 1); } // Quit to be reloaded window_stack_pop_all(true); } void comm_init(int inbox, int outbox) { app_message_register_inbox_received(inbox_received_handler); app_message_open(inbox, outbox); }
... #if defined(PBL_COLOR) Tuple *background_t = dict_find(iter, AppKeyColorBackground); if(background_t) { data_set_color(ColorBackground, (GColor){ .argb = background_t->value->int8 }); } Tuple *sides_t = dict_find(iter, AppKeyColorSides); if(sides_t) { data_set_color(ColorSides, (GColor){ .argb = sides_t->value->int8 }); } Tuple *face_t = dict_find(iter, AppKeyColorFace); if(face_t) { data_set_color(ColorFace, (GColor){ .argb = face_t->value->int8 }); } #endif ... // Other settings Tuple *anim_t = dict_find(iter, AppKeyAnimations); if(anim_t) { data_set_animations(anim_t->value->int8 == 1); } Tuple *bluetooth_t = dict_find(iter, AppKeyBluetooth); if(bluetooth_t) { data_set_bluetooth_alert(bluetooth_t->value->int8 == 1); } // Quit to be reloaded ...
441145c74f51428568f3b44cce72df8b070d25bf
src/main/java/com/uwetrottmann/trakt/v2/entities/BaseEntity.java
src/main/java/com/uwetrottmann/trakt/v2/entities/BaseEntity.java
package com.uwetrottmann.trakt.v2.entities; import java.util.List; public abstract class BaseEntity { public String title; public Images images; public List<String> available_translations; }
package com.uwetrottmann.trakt.v2.entities; import org.joda.time.DateTime; import java.util.List; public abstract class BaseEntity { public String title; public DateTime updated_at; public Images images; public List<String> available_translations; }
Add updated_at property to base entity.
Add updated_at property to base entity.
Java
apache-2.0
UweTrottmann/trakt-java
java
## Code Before: package com.uwetrottmann.trakt.v2.entities; import java.util.List; public abstract class BaseEntity { public String title; public Images images; public List<String> available_translations; } ## Instruction: Add updated_at property to base entity. ## Code After: package com.uwetrottmann.trakt.v2.entities; import org.joda.time.DateTime; import java.util.List; public abstract class BaseEntity { public String title; public DateTime updated_at; public Images images; public List<String> available_translations; }
// ... existing code ... package com.uwetrottmann.trakt.v2.entities; import org.joda.time.DateTime; import java.util.List; // ... modified code ... public abstract class BaseEntity { public String title; public DateTime updated_at; public Images images; public List<String> available_translations; // ... rest of the code ...
c24fa91c900fc4f0d3ac5a10d10bfe5c57c9ef5c
errors.py
errors.py
class ParserError(Exception): """Raised when parsing input fails.""" class OpenParenError(ParserError): """Raised when there are too few opening parenthesis.""" @staticmethod def build(): return OpenParenError("too few opening parenthesis") class CloseParenError(ParserError): """Raised when there are too few closing parenthesis.""" @staticmethod def build(): return CloseParenError("too few closing parenthesis") class SymbolNotFoundError(Exception): """Raised when a symbol could not be found in an environment chain.""" @staticmethod def build(symbol): return SymbolNotFoundError("could not find symbol " + str(symbol)) class IncorrectArgumentCountError(Exception): """Raised when a function is called with the wrong number of arguments.""" @staticmethod def build(expected, actual): return IncorrectArgumentCountError("expected " + str(expected) + ", got " + str(actual)) class WrongArgumentTypeError(Exception): """Raised when an argument is of the wrong type.""" @staticmethod def build(arg, expected_class): return WrongArgumentTypeError("wrong argument type for " + str(arg) + ": expected " + expected_class.__name__.lower() + ", got " + arg.__class__.__name__.lower()) class ApplicationError(Exception): """Raised when a function could not be applied correctly."""
class ParserError(Exception): """Raised when parsing input fails.""" class OpenParenError(ParserError): """Raised when there are too few opening parenthesis.""" @staticmethod def build(): return OpenParenError("too few opening parenthesis") class CloseParenError(ParserError): """Raised when there are too few closing parenthesis.""" @staticmethod def build(): return CloseParenError("too few closing parenthesis") class SymbolNotFoundError(Exception): """Raised when a symbol could not be found in an environment chain.""" @staticmethod def build(symbol): return SymbolNotFoundError("could not find symbol " + str(symbol)) class IncorrectArgumentCountError(Exception): """Raised when a function is called with the wrong number of arguments.""" @staticmethod def build(expected, actual): return IncorrectArgumentCountError("expected " + str(expected) + ", got " + str(actual)) class WrongArgumentTypeError(Exception): """Raised when an argument is of the wrong type.""" @staticmethod def build(arg, expected_class): expected = "" if hasattr(expected_class, "__name__"): expected = expected_class.__name__ + "," else: # support multiple expected classes expected = "one of " expected += ", ".join(map(lambda x: x.__name__, expected_class)) expected += ";" return WrongArgumentTypeError("wrong argument type for " + str(arg) + ": expected " + expected.lower() + " got " + arg.__class__.__name__.lower()) class ApplicationError(Exception): """Raised when a function could not be applied correctly."""
Support class tuples as WATE args
Support class tuples as WATE args
Python
mit
jasontbradshaw/plinth
python
## Code Before: class ParserError(Exception): """Raised when parsing input fails.""" class OpenParenError(ParserError): """Raised when there are too few opening parenthesis.""" @staticmethod def build(): return OpenParenError("too few opening parenthesis") class CloseParenError(ParserError): """Raised when there are too few closing parenthesis.""" @staticmethod def build(): return CloseParenError("too few closing parenthesis") class SymbolNotFoundError(Exception): """Raised when a symbol could not be found in an environment chain.""" @staticmethod def build(symbol): return SymbolNotFoundError("could not find symbol " + str(symbol)) class IncorrectArgumentCountError(Exception): """Raised when a function is called with the wrong number of arguments.""" @staticmethod def build(expected, actual): return IncorrectArgumentCountError("expected " + str(expected) + ", got " + str(actual)) class WrongArgumentTypeError(Exception): """Raised when an argument is of the wrong type.""" @staticmethod def build(arg, expected_class): return WrongArgumentTypeError("wrong argument type for " + str(arg) + ": expected " + expected_class.__name__.lower() + ", got " + arg.__class__.__name__.lower()) class ApplicationError(Exception): """Raised when a function could not be applied correctly.""" ## Instruction: Support class tuples as WATE args ## Code After: class ParserError(Exception): """Raised when parsing input fails.""" class OpenParenError(ParserError): """Raised when there are too few opening parenthesis.""" @staticmethod def build(): return OpenParenError("too few opening parenthesis") class CloseParenError(ParserError): """Raised when there are too few closing parenthesis.""" @staticmethod def build(): return CloseParenError("too few closing parenthesis") class SymbolNotFoundError(Exception): """Raised when a symbol could not be found in an environment chain.""" @staticmethod def build(symbol): return SymbolNotFoundError("could not find symbol " + str(symbol)) class IncorrectArgumentCountError(Exception): """Raised when a function is called with the wrong number of arguments.""" @staticmethod def build(expected, actual): return IncorrectArgumentCountError("expected " + str(expected) + ", got " + str(actual)) class WrongArgumentTypeError(Exception): """Raised when an argument is of the wrong type.""" @staticmethod def build(arg, expected_class): expected = "" if hasattr(expected_class, "__name__"): expected = expected_class.__name__ + "," else: # support multiple expected classes expected = "one of " expected += ", ".join(map(lambda x: x.__name__, expected_class)) expected += ";" return WrongArgumentTypeError("wrong argument type for " + str(arg) + ": expected " + expected.lower() + " got " + arg.__class__.__name__.lower()) class ApplicationError(Exception): """Raised when a function could not be applied correctly."""
// ... existing code ... @staticmethod def build(arg, expected_class): expected = "" if hasattr(expected_class, "__name__"): expected = expected_class.__name__ + "," else: # support multiple expected classes expected = "one of " expected += ", ".join(map(lambda x: x.__name__, expected_class)) expected += ";" return WrongArgumentTypeError("wrong argument type for " + str(arg) + ": expected " + expected.lower() + " got " + arg.__class__.__name__.lower()) class ApplicationError(Exception): // ... rest of the code ...
5d36b16fde863cccf404f658f53eac600ac9ddb1
foomodules/link_harvester/common_handlers.py
foomodules/link_harvester/common_handlers.py
import re import socket import urllib from bs4 import BeautifulSoup WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/") def default_handler(metadata): return {key: getattr(metadata, key) for key in ["original_url", "url", "title", "description", "human_readable_type"]} def wurstball_handler(metadata): if WURSTBALL_RE.match(metadata.url) is None: return None ret = default_handler(metadata) soup = BeautifulSoup(metadata.buf) img_url = soup.find(id="content-main").img["src"] try: response = urllib.request.urlopen(img_url, timeout=5) img_data = response.read() except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError): return ret mime_type = response.getheader("Content-Type") ret.update({"image_mime_type": mime_type, "image_buffer": img_data, "image_url": img_url}) return ret
import logging import re import socket import urllib from bs4 import BeautifulSoup logger = logging.getLogger(__name__) WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/") def default_handler(metadata): return {key: getattr(metadata, key) for key in ["original_url", "url", "title", "description", "human_readable_type"]} def wurstball_handler(metadata): if WURSTBALL_RE.match(metadata.url) is None: return None ret = default_handler(metadata) soup = BeautifulSoup(metadata.buf) img_url = soup.find(id="content-main").img["src"] try: response = urllib.request.urlopen(img_url, timeout=5) img_data = response.read() except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError) as err: logger.warn("Could not download Wurstball image: {}".format(err)) return ret mime_type = response.getheader("Content-Type") ret.update({"image_mime_type": mime_type, "image_buffer": img_data, "image_url": img_url}) return ret
Print warning when wurstball downloads fail
Print warning when wurstball downloads fail
Python
mit
horazont/xmpp-crowd
python
## Code Before: import re import socket import urllib from bs4 import BeautifulSoup WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/") def default_handler(metadata): return {key: getattr(metadata, key) for key in ["original_url", "url", "title", "description", "human_readable_type"]} def wurstball_handler(metadata): if WURSTBALL_RE.match(metadata.url) is None: return None ret = default_handler(metadata) soup = BeautifulSoup(metadata.buf) img_url = soup.find(id="content-main").img["src"] try: response = urllib.request.urlopen(img_url, timeout=5) img_data = response.read() except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError): return ret mime_type = response.getheader("Content-Type") ret.update({"image_mime_type": mime_type, "image_buffer": img_data, "image_url": img_url}) return ret ## Instruction: Print warning when wurstball downloads fail ## Code After: import logging import re import socket import urllib from bs4 import BeautifulSoup logger = logging.getLogger(__name__) WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/") def default_handler(metadata): return {key: getattr(metadata, key) for key in ["original_url", "url", "title", "description", "human_readable_type"]} def wurstball_handler(metadata): if WURSTBALL_RE.match(metadata.url) is None: return None ret = default_handler(metadata) soup = BeautifulSoup(metadata.buf) img_url = soup.find(id="content-main").img["src"] try: response = urllib.request.urlopen(img_url, timeout=5) img_data = response.read() except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError) as err: logger.warn("Could not download Wurstball image: {}".format(err)) return ret mime_type = response.getheader("Content-Type") ret.update({"image_mime_type": mime_type, "image_buffer": img_data, "image_url": img_url}) return ret
// ... existing code ... import logging import re import socket import urllib from bs4 import BeautifulSoup logger = logging.getLogger(__name__) WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/") // ... modified code ... img_data = response.read() except (socket.timeout, urllib.error.URLError, urllib.error.HTTPError) as err: logger.warn("Could not download Wurstball image: {}".format(err)) return ret mime_type = response.getheader("Content-Type") // ... rest of the code ...
cb9d3d6c4608b6517c8476d66e68455a6abd8bb9
snippets/multiple-versions-parallel/src/test/java/org/camunda/bpm/example/multiple_versions_parallel/nonarquillian/InMemoryH2Test.java
snippets/multiple-versions-parallel/src/test/java/org/camunda/bpm/example/multiple_versions_parallel/nonarquillian/InMemoryH2Test.java
package org.camunda.bpm.example.multiple_versions_parallel.nonarquillian; import org.camunda.bpm.engine.impl.util.LogUtil; import org.camunda.bpm.engine.test.ProcessEngineTestCase; import org.camunda.bpm.engine.test.Deployment; /** * Test case starting an in-memory database-backed Process Engine. */ public class InMemoryH2Test extends ProcessEngineTestCase { private static final String PROCESS_DEFINITION_KEY = "multiple-versions-parallel"; // enable more detailed logging static { LogUtil.readJavaUtilLoggingConfigFromClasspath(); } /** * Just tests if the process definition is deployable. */ @Deployment(resources = "process.bpmn") public void testParsingAndDeployment() { // nothing is done here, as we just want to check for exceptions during deployment } }
package org.camunda.bpm.example.multiple_versions_parallel.nonarquillian; import java.io.IOException; import java.util.Properties; import java.util.ResourceBundle; import org.camunda.bpm.engine.impl.util.LogUtil; import org.camunda.bpm.engine.test.ProcessEngineTestCase; import org.camunda.bpm.engine.test.Deployment; /** * Test case starting an in-memory database-backed Process Engine. */ public class InMemoryH2Test extends ProcessEngineTestCase { private static final String PROCESS_DEFINITION_KEY = "multiple-versions-parallel-v"; // enable more detailed logging static { LogUtil.readJavaUtilLoggingConfigFromClasspath(); } /** * Just tests if the process definition is deployable. * @throws IOException */ @Deployment(resources = "process.bpmn") public void testParsingAndDeployment() throws IOException { Properties version = new Properties(); version.load(this.getClass().getResourceAsStream("/version.properties")); runtimeService.startProcessInstanceByKey(PROCESS_DEFINITION_KEY + version.getProperty("maven.version.major") + "." + version.getProperty("maven.version.minor")); } }
Access version in test case
Access version in test case
Java
apache-2.0
nagyistoce/camunda-consulting,camunda/camunda-consulting,camunda/camunda-consulting,nagyistoce/camunda-consulting,plexiti/camunda-consulting,plexiti/camunda-consulting,camunda/camunda-consulting,nagyistoce/camunda-consulting,plexiti/camunda-consulting,camunda/camunda-consulting
java
## Code Before: package org.camunda.bpm.example.multiple_versions_parallel.nonarquillian; import org.camunda.bpm.engine.impl.util.LogUtil; import org.camunda.bpm.engine.test.ProcessEngineTestCase; import org.camunda.bpm.engine.test.Deployment; /** * Test case starting an in-memory database-backed Process Engine. */ public class InMemoryH2Test extends ProcessEngineTestCase { private static final String PROCESS_DEFINITION_KEY = "multiple-versions-parallel"; // enable more detailed logging static { LogUtil.readJavaUtilLoggingConfigFromClasspath(); } /** * Just tests if the process definition is deployable. */ @Deployment(resources = "process.bpmn") public void testParsingAndDeployment() { // nothing is done here, as we just want to check for exceptions during deployment } } ## Instruction: Access version in test case ## Code After: package org.camunda.bpm.example.multiple_versions_parallel.nonarquillian; import java.io.IOException; import java.util.Properties; import java.util.ResourceBundle; import org.camunda.bpm.engine.impl.util.LogUtil; import org.camunda.bpm.engine.test.ProcessEngineTestCase; import org.camunda.bpm.engine.test.Deployment; /** * Test case starting an in-memory database-backed Process Engine. */ public class InMemoryH2Test extends ProcessEngineTestCase { private static final String PROCESS_DEFINITION_KEY = "multiple-versions-parallel-v"; // enable more detailed logging static { LogUtil.readJavaUtilLoggingConfigFromClasspath(); } /** * Just tests if the process definition is deployable. * @throws IOException */ @Deployment(resources = "process.bpmn") public void testParsingAndDeployment() throws IOException { Properties version = new Properties(); version.load(this.getClass().getResourceAsStream("/version.properties")); runtimeService.startProcessInstanceByKey(PROCESS_DEFINITION_KEY + version.getProperty("maven.version.major") + "." + version.getProperty("maven.version.minor")); } }
// ... existing code ... package org.camunda.bpm.example.multiple_versions_parallel.nonarquillian; import java.io.IOException; import java.util.Properties; import java.util.ResourceBundle; import org.camunda.bpm.engine.impl.util.LogUtil; import org.camunda.bpm.engine.test.ProcessEngineTestCase; // ... modified code ... */ public class InMemoryH2Test extends ProcessEngineTestCase { private static final String PROCESS_DEFINITION_KEY = "multiple-versions-parallel-v"; // enable more detailed logging static { ... /** * Just tests if the process definition is deployable. * @throws IOException */ @Deployment(resources = "process.bpmn") public void testParsingAndDeployment() throws IOException { Properties version = new Properties(); version.load(this.getClass().getResourceAsStream("/version.properties")); runtimeService.startProcessInstanceByKey(PROCESS_DEFINITION_KEY + version.getProperty("maven.version.major") + "." + version.getProperty("maven.version.minor")); } } // ... rest of the code ...
a456449c5a30ea9ad9af308ea407246425ad288e
students/crobison/session04/file_lab.py
students/crobison/session04/file_lab.py
import os cwd = os.getcwd() # write a program which prints the full path to all files # in the current directory, one per line for item in os.listdir(cwd): print(cwd + "/" + item) # write a program which copies a file from a source, to a # destination (without using shutil, or the OS copy command) file = open('file_lab01.txt', 'r') file_text = file.read() file_new = open('file_lab02.txt', 'w') file_new.write(file_text) file.close() file_new.close() # advanced: make it work for any size file: i.e. don’t read # the entire contents of the file into memory at once. file = open('file_lab01.txt', 'r') file_new = open('file_lab02.txt', 'w') file_text = file.readline() for line in file_text: file_new.write(line) line = file.readline() file.close() file_new.close() # not working correctl, second try: print('second try:') file_new = open('file_labe02.txt', 'w') with open('file_lab01.txt', 'r') as f: for line in f: file_text = f.readline() file_new.write(line) file_new.close()
import os cwd = os.getcwd() # write a program which prints the full path to all files # in the current directory, one per line for item in os.listdir(cwd): print(cwd + "/" + item) # write a program which copies a file from a source, to a # destination (without using shutil, or the OS copy command) file = open('file_lab01.txt', 'r') file_text = file.read() file_new = open('file_lab02.txt', 'w') file_new.write(file_text) file.close() file_new.close() # advanced: make it work for any size file: i.e. don’t read # the entire contents of the file into memory at once. with open('file_lab01.txt','r') as r, open('file_lab02.txt', 'w') as w: for line in r: w.write(line) r.close() w.close()
Fix section to read and write large files.
Fix section to read and write large files.
Python
unlicense
UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,Baumelbi/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,Baumelbi/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016
python
## Code Before: import os cwd = os.getcwd() # write a program which prints the full path to all files # in the current directory, one per line for item in os.listdir(cwd): print(cwd + "/" + item) # write a program which copies a file from a source, to a # destination (without using shutil, or the OS copy command) file = open('file_lab01.txt', 'r') file_text = file.read() file_new = open('file_lab02.txt', 'w') file_new.write(file_text) file.close() file_new.close() # advanced: make it work for any size file: i.e. don’t read # the entire contents of the file into memory at once. file = open('file_lab01.txt', 'r') file_new = open('file_lab02.txt', 'w') file_text = file.readline() for line in file_text: file_new.write(line) line = file.readline() file.close() file_new.close() # not working correctl, second try: print('second try:') file_new = open('file_labe02.txt', 'w') with open('file_lab01.txt', 'r') as f: for line in f: file_text = f.readline() file_new.write(line) file_new.close() ## Instruction: Fix section to read and write large files. ## Code After: import os cwd = os.getcwd() # write a program which prints the full path to all files # in the current directory, one per line for item in os.listdir(cwd): print(cwd + "/" + item) # write a program which copies a file from a source, to a # destination (without using shutil, or the OS copy command) file = open('file_lab01.txt', 'r') file_text = file.read() file_new = open('file_lab02.txt', 'w') file_new.write(file_text) file.close() file_new.close() # advanced: make it work for any size file: i.e. don’t read # the entire contents of the file into memory at once. with open('file_lab01.txt','r') as r, open('file_lab02.txt', 'w') as w: for line in r: w.write(line) r.close() w.close()
# ... existing code ... # advanced: make it work for any size file: i.e. don’t read # the entire contents of the file into memory at once. with open('file_lab01.txt','r') as r, open('file_lab02.txt', 'w') as w: for line in r: w.write(line) r.close() w.close() # ... rest of the code ...
42869823b4af024906606c5caf50e5dc5de69a57
api/mcapi/user/projects.py
api/mcapi/user/projects.py
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return ""
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') @apikey @jsonp def get_datadirs_for_project(user, project_id): rr = r.table('project2datadir').filter({'project_id': project_id}) rr = rr.eq_join('project_id', r.table('projects')).zip() rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() selection = list(rr.run(g.conn, time_format='raw')) if len(selection) > 0 and selection[0]['owner'] == user: return args.json_as_format_arg(selection) return args.json_as_format_arg([])
Add call to get datadirs for a project.
Add call to get datadirs for a project.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
python
## Code Before: from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" ## Instruction: Add call to get datadirs for a project. ## Code After: from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') @apikey @jsonp def get_datadirs_for_project(user, project_id): rr = r.table('project2datadir').filter({'project_id': project_id}) rr = rr.eq_join('project_id', r.table('projects')).zip() rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() selection = list(rr.run(g.conn, time_format='raw')) if len(selection) > 0 and selection[0]['owner'] == user: return args.json_as_format_arg(selection) return args.json_as_format_arg([])
... items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): ... if project['owner'] != user: return error_response(400) return "" @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') @apikey @jsonp def get_datadirs_for_project(user, project_id): rr = r.table('project2datadir').filter({'project_id': project_id}) rr = rr.eq_join('project_id', r.table('projects')).zip() rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() selection = list(rr.run(g.conn, time_format='raw')) if len(selection) > 0 and selection[0]['owner'] == user: return args.json_as_format_arg(selection) return args.json_as_format_arg([]) ...
8dae2049c96932855cc0162437d799e258f94a53
test/absolute_import/local_module.py
test/absolute_import/local_module.py
from __future__ import absolute_import import unittest # this is stdlib unittest, but jedi gets the local one class Assertions(unittest.TestCase): pass
from __future__ import absolute_import import unittest class Assertions(unittest.TestCase): pass
Fix inaccuracy in test comment, since jedi now does the right thing
Fix inaccuracy in test comment, since jedi now does the right thing
Python
mit
dwillmer/jedi,flurischt/jedi,mfussenegger/jedi,tjwei/jedi,flurischt/jedi,jonashaag/jedi,jonashaag/jedi,mfussenegger/jedi,tjwei/jedi,WoLpH/jedi,WoLpH/jedi,dwillmer/jedi
python
## Code Before: from __future__ import absolute_import import unittest # this is stdlib unittest, but jedi gets the local one class Assertions(unittest.TestCase): pass ## Instruction: Fix inaccuracy in test comment, since jedi now does the right thing ## Code After: from __future__ import absolute_import import unittest class Assertions(unittest.TestCase): pass
... from __future__ import absolute_import import unittest class Assertions(unittest.TestCase): ...
5b4473a7d1748e54d7c097b818ecb4fbb2080497
src/main/java/info/u_team/u_team_core/data/ExistingFileHelperWithForge.java
src/main/java/info/u_team/u_team_core/data/ExistingFileHelperWithForge.java
package info.u_team.u_team_core.data; import java.io.IOException; import java.util.Arrays; import net.minecraft.resources.*; import net.minecraft.util.ResourceLocation; import net.minecraftforge.common.data.ExistingFileHelper; import net.minecraftforge.fml.loading.FMLLoader; public class ExistingFileHelperWithForge extends ExistingFileHelper { private final ExistingFileHelper existingFileHelper; public ExistingFileHelperWithForge(ExistingFileHelper helper) { super(Arrays.asList(FMLLoader.getForgePath()), helper.isEnabled()); this.existingFileHelper = helper; } public boolean exists(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) { final boolean exists = existingFileHelper.exists(loc, type, pathSuffix, pathPrefix); if (!exists) { return super.exists(loc, type, pathSuffix, pathPrefix); } return exists; } public IResource getResource(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) throws IOException { try { return existingFileHelper.getResource(loc, type, pathSuffix, pathPrefix); } catch (IOException ex) { return super.getResource(loc, type, pathSuffix, pathPrefix); } } }
package info.u_team.u_team_core.data; import java.io.IOException; import java.util.Arrays; import net.minecraft.resources.*; import net.minecraft.util.ResourceLocation; import net.minecraftforge.common.data.ExistingFileHelper; import net.minecraftforge.fml.loading.FMLLoader; class ExistingFileHelperWithForge extends ExistingFileHelper { private final ExistingFileHelper existingFileHelper; protected ExistingFileHelperWithForge(ExistingFileHelper helper) { super(Arrays.asList(FMLLoader.getForgePath()), helper.isEnabled()); this.existingFileHelper = helper; } public boolean exists(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) { final boolean exists = existingFileHelper.exists(loc, type, pathSuffix, pathPrefix); if (!exists) { return super.exists(loc, type, pathSuffix, pathPrefix); } return exists; } public IResource getResource(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) throws IOException { try { return existingFileHelper.getResource(loc, type, pathSuffix, pathPrefix); } catch (IOException ex) { return super.getResource(loc, type, pathSuffix, pathPrefix); } } }
Make the class package private
Make the class package private
Java
apache-2.0
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
java
## Code Before: package info.u_team.u_team_core.data; import java.io.IOException; import java.util.Arrays; import net.minecraft.resources.*; import net.minecraft.util.ResourceLocation; import net.minecraftforge.common.data.ExistingFileHelper; import net.minecraftforge.fml.loading.FMLLoader; public class ExistingFileHelperWithForge extends ExistingFileHelper { private final ExistingFileHelper existingFileHelper; public ExistingFileHelperWithForge(ExistingFileHelper helper) { super(Arrays.asList(FMLLoader.getForgePath()), helper.isEnabled()); this.existingFileHelper = helper; } public boolean exists(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) { final boolean exists = existingFileHelper.exists(loc, type, pathSuffix, pathPrefix); if (!exists) { return super.exists(loc, type, pathSuffix, pathPrefix); } return exists; } public IResource getResource(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) throws IOException { try { return existingFileHelper.getResource(loc, type, pathSuffix, pathPrefix); } catch (IOException ex) { return super.getResource(loc, type, pathSuffix, pathPrefix); } } } ## Instruction: Make the class package private ## Code After: package info.u_team.u_team_core.data; import java.io.IOException; import java.util.Arrays; import net.minecraft.resources.*; import net.minecraft.util.ResourceLocation; import net.minecraftforge.common.data.ExistingFileHelper; import net.minecraftforge.fml.loading.FMLLoader; class ExistingFileHelperWithForge extends ExistingFileHelper { private final ExistingFileHelper existingFileHelper; protected ExistingFileHelperWithForge(ExistingFileHelper helper) { super(Arrays.asList(FMLLoader.getForgePath()), helper.isEnabled()); this.existingFileHelper = helper; } public boolean exists(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) { final boolean exists = existingFileHelper.exists(loc, type, pathSuffix, pathPrefix); if (!exists) { return super.exists(loc, type, pathSuffix, pathPrefix); } return exists; } public IResource getResource(ResourceLocation loc, ResourcePackType type, String pathSuffix, String pathPrefix) throws IOException { try { return existingFileHelper.getResource(loc, type, pathSuffix, pathPrefix); } catch (IOException ex) { return super.getResource(loc, type, pathSuffix, pathPrefix); } } }
... import net.minecraftforge.common.data.ExistingFileHelper; import net.minecraftforge.fml.loading.FMLLoader; class ExistingFileHelperWithForge extends ExistingFileHelper { private final ExistingFileHelper existingFileHelper; protected ExistingFileHelperWithForge(ExistingFileHelper helper) { super(Arrays.asList(FMLLoader.getForgePath()), helper.isEnabled()); this.existingFileHelper = helper; } ...
15360ca3852bd597b4ba645bb3b770edb86bcc53
junit-commons/src/test/java/org/junit/gen5/commons/util/FindClassesInPackageTest.java
junit-commons/src/test/java/org/junit/gen5/commons/util/FindClassesInPackageTest.java
/* * Copyright 2015 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.gen5.commons.util; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.junit.Assert; import org.junit.Test; public class FindClassesInPackageTest { @Test public void findAllClassesInThisPackage() throws IOException, ClassNotFoundException { List<Class<?>> classes = Arrays.asList(ReflectionUtils.findAllClassesInPackage("org.junit.gen5.commons")); System.out.println("Number of classes found: " + classes.size()); for (Class<?> clazz : classes) { System.out.println(clazz.getName()); } Assert.assertTrue("Should be at least 20 classes", classes.size() >= 20); Assert.assertTrue(classes.contains(NestedClassToBeFound.class)); Assert.assertTrue(classes.contains(MemberClassToBeFound.class)); } class MemberClassToBeFound { } static class NestedClassToBeFound { } }
/* * Copyright 2015 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.gen5.commons.util; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.junit.Assert; import org.junit.Test; public class FindClassesInPackageTest { @Test public void findAllClassesInThisPackage() throws IOException, ClassNotFoundException { List<Class<?>> classes = Arrays.asList(ReflectionUtils.findAllClassesInPackage("org.junit.gen5.commons")); Assert.assertTrue("Should be at least 20 classes", classes.size() >= 20); Assert.assertTrue(classes.contains(NestedClassToBeFound.class)); Assert.assertTrue(classes.contains(MemberClassToBeFound.class)); } class MemberClassToBeFound { } static class NestedClassToBeFound { } }
Remove printing to System.out in test
Remove printing to System.out in test
Java
epl-1.0
marcphilipp/junit-lambda,sbrannen/junit-lambda,junit-team/junit-lambda,marcphilipp/junit5
java
## Code Before: /* * Copyright 2015 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.gen5.commons.util; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.junit.Assert; import org.junit.Test; public class FindClassesInPackageTest { @Test public void findAllClassesInThisPackage() throws IOException, ClassNotFoundException { List<Class<?>> classes = Arrays.asList(ReflectionUtils.findAllClassesInPackage("org.junit.gen5.commons")); System.out.println("Number of classes found: " + classes.size()); for (Class<?> clazz : classes) { System.out.println(clazz.getName()); } Assert.assertTrue("Should be at least 20 classes", classes.size() >= 20); Assert.assertTrue(classes.contains(NestedClassToBeFound.class)); Assert.assertTrue(classes.contains(MemberClassToBeFound.class)); } class MemberClassToBeFound { } static class NestedClassToBeFound { } } ## Instruction: Remove printing to System.out in test ## Code After: /* * Copyright 2015 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package org.junit.gen5.commons.util; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.junit.Assert; import org.junit.Test; public class FindClassesInPackageTest { @Test public void findAllClassesInThisPackage() throws IOException, ClassNotFoundException { List<Class<?>> classes = Arrays.asList(ReflectionUtils.findAllClassesInPackage("org.junit.gen5.commons")); Assert.assertTrue("Should be at least 20 classes", classes.size() >= 20); Assert.assertTrue(classes.contains(NestedClassToBeFound.class)); Assert.assertTrue(classes.contains(MemberClassToBeFound.class)); } class MemberClassToBeFound { } static class NestedClassToBeFound { } }
# ... existing code ... @Test public void findAllClassesInThisPackage() throws IOException, ClassNotFoundException { List<Class<?>> classes = Arrays.asList(ReflectionUtils.findAllClassesInPackage("org.junit.gen5.commons")); Assert.assertTrue("Should be at least 20 classes", classes.size() >= 20); Assert.assertTrue(classes.contains(NestedClassToBeFound.class)); Assert.assertTrue(classes.contains(MemberClassToBeFound.class)); # ... rest of the code ...
c9195f615ca4f08c4ac7aa3911bf2d87a725f555
app/src/main/kotlin/ru/dyatel/tsuschedule/parsing/LessonUtil.kt
app/src/main/kotlin/ru/dyatel/tsuschedule/parsing/LessonUtil.kt
package ru.dyatel.tsuschedule.parsing import android.content.ContentValues import ru.dyatel.tsuschedule.data.LessonTable fun Lesson.toContentValues(): ContentValues { val values = ContentValues() values.put(LessonTable.PARITY, parity.toString()) values.put(LessonTable.WEEKDAY, weekday) values.put(LessonTable.TIME, time) values.put(LessonTable.DISCIPLINE, discipline) values.put(LessonTable.AUDITORY, auditory) values.put(LessonTable.TEACHER, teacher) values.put(LessonTable.TYPE, type.toString()) values.put(LessonTable.SUBGROUP, subgroup) return values }
package ru.dyatel.tsuschedule.parsing import android.content.ContentValues import android.database.Cursor import ru.dyatel.tsuschedule.data.LessonTable fun Lesson.toContentValues(): ContentValues { val values = ContentValues() values.put(LessonTable.PARITY, parity.toString()) values.put(LessonTable.WEEKDAY, weekday) values.put(LessonTable.TIME, time) values.put(LessonTable.DISCIPLINE, discipline) values.put(LessonTable.AUDITORY, auditory) values.put(LessonTable.TEACHER, teacher) values.put(LessonTable.TYPE, type.toString()) values.put(LessonTable.SUBGROUP, subgroup) return values } private fun getColumnPairFromCursor(cursor: Cursor, column: String): Pair<String, Int> = column to cursor.getColumnIndexOrThrow(column) fun getLessonColumnIndices(cursor: Cursor): Map<String, Int> = mapOf( getColumnPairFromCursor(cursor, LessonTable.PARITY), getColumnPairFromCursor(cursor, LessonTable.WEEKDAY), getColumnPairFromCursor(cursor, LessonTable.TIME), getColumnPairFromCursor(cursor, LessonTable.DISCIPLINE), getColumnPairFromCursor(cursor, LessonTable.AUDITORY), getColumnPairFromCursor(cursor, LessonTable.TEACHER), getColumnPairFromCursor(cursor, LessonTable.TYPE), getColumnPairFromCursor(cursor, LessonTable.SUBGROUP) ) fun constructLessonFromCursor(cursor: Cursor, columnIndices: Map<String, Int>) = Lesson( Parity.valueOf(cursor.getString(columnIndices[LessonTable.PARITY]!!)), cursor.getString(columnIndices[LessonTable.WEEKDAY]!!), cursor.getString(columnIndices[LessonTable.TIME]!!), cursor.getString(columnIndices[LessonTable.DISCIPLINE]!!), cursor.getString(columnIndices[LessonTable.AUDITORY]!!), cursor.getString(columnIndices[LessonTable.TEACHER]!!), Lesson.Type.valueOf(cursor.getString(columnIndices[LessonTable.TYPE]!!)), cursor.getInt(columnIndices[LessonTable.SUBGROUP]!!) )
Add utility to get a Lesson instance from the DB
Add utility to get a Lesson instance from the DB
Kotlin
mit
dya-tel/TSU-Schedule
kotlin
## Code Before: package ru.dyatel.tsuschedule.parsing import android.content.ContentValues import ru.dyatel.tsuschedule.data.LessonTable fun Lesson.toContentValues(): ContentValues { val values = ContentValues() values.put(LessonTable.PARITY, parity.toString()) values.put(LessonTable.WEEKDAY, weekday) values.put(LessonTable.TIME, time) values.put(LessonTable.DISCIPLINE, discipline) values.put(LessonTable.AUDITORY, auditory) values.put(LessonTable.TEACHER, teacher) values.put(LessonTable.TYPE, type.toString()) values.put(LessonTable.SUBGROUP, subgroup) return values } ## Instruction: Add utility to get a Lesson instance from the DB ## Code After: package ru.dyatel.tsuschedule.parsing import android.content.ContentValues import android.database.Cursor import ru.dyatel.tsuschedule.data.LessonTable fun Lesson.toContentValues(): ContentValues { val values = ContentValues() values.put(LessonTable.PARITY, parity.toString()) values.put(LessonTable.WEEKDAY, weekday) values.put(LessonTable.TIME, time) values.put(LessonTable.DISCIPLINE, discipline) values.put(LessonTable.AUDITORY, auditory) values.put(LessonTable.TEACHER, teacher) values.put(LessonTable.TYPE, type.toString()) values.put(LessonTable.SUBGROUP, subgroup) return values } private fun getColumnPairFromCursor(cursor: Cursor, column: String): Pair<String, Int> = column to cursor.getColumnIndexOrThrow(column) fun getLessonColumnIndices(cursor: Cursor): Map<String, Int> = mapOf( getColumnPairFromCursor(cursor, LessonTable.PARITY), getColumnPairFromCursor(cursor, LessonTable.WEEKDAY), getColumnPairFromCursor(cursor, LessonTable.TIME), getColumnPairFromCursor(cursor, LessonTable.DISCIPLINE), getColumnPairFromCursor(cursor, LessonTable.AUDITORY), getColumnPairFromCursor(cursor, LessonTable.TEACHER), getColumnPairFromCursor(cursor, LessonTable.TYPE), getColumnPairFromCursor(cursor, LessonTable.SUBGROUP) ) fun constructLessonFromCursor(cursor: Cursor, columnIndices: Map<String, Int>) = Lesson( Parity.valueOf(cursor.getString(columnIndices[LessonTable.PARITY]!!)), cursor.getString(columnIndices[LessonTable.WEEKDAY]!!), cursor.getString(columnIndices[LessonTable.TIME]!!), cursor.getString(columnIndices[LessonTable.DISCIPLINE]!!), cursor.getString(columnIndices[LessonTable.AUDITORY]!!), cursor.getString(columnIndices[LessonTable.TEACHER]!!), Lesson.Type.valueOf(cursor.getString(columnIndices[LessonTable.TYPE]!!)), cursor.getInt(columnIndices[LessonTable.SUBGROUP]!!) )
... package ru.dyatel.tsuschedule.parsing import android.content.ContentValues import android.database.Cursor import ru.dyatel.tsuschedule.data.LessonTable fun Lesson.toContentValues(): ContentValues { ... values.put(LessonTable.SUBGROUP, subgroup) return values } private fun getColumnPairFromCursor(cursor: Cursor, column: String): Pair<String, Int> = column to cursor.getColumnIndexOrThrow(column) fun getLessonColumnIndices(cursor: Cursor): Map<String, Int> = mapOf( getColumnPairFromCursor(cursor, LessonTable.PARITY), getColumnPairFromCursor(cursor, LessonTable.WEEKDAY), getColumnPairFromCursor(cursor, LessonTable.TIME), getColumnPairFromCursor(cursor, LessonTable.DISCIPLINE), getColumnPairFromCursor(cursor, LessonTable.AUDITORY), getColumnPairFromCursor(cursor, LessonTable.TEACHER), getColumnPairFromCursor(cursor, LessonTable.TYPE), getColumnPairFromCursor(cursor, LessonTable.SUBGROUP) ) fun constructLessonFromCursor(cursor: Cursor, columnIndices: Map<String, Int>) = Lesson( Parity.valueOf(cursor.getString(columnIndices[LessonTable.PARITY]!!)), cursor.getString(columnIndices[LessonTable.WEEKDAY]!!), cursor.getString(columnIndices[LessonTable.TIME]!!), cursor.getString(columnIndices[LessonTable.DISCIPLINE]!!), cursor.getString(columnIndices[LessonTable.AUDITORY]!!), cursor.getString(columnIndices[LessonTable.TEACHER]!!), Lesson.Type.valueOf(cursor.getString(columnIndices[LessonTable.TYPE]!!)), cursor.getInt(columnIndices[LessonTable.SUBGROUP]!!) ) ...
a7df3bb7cc898224bc9844fb160164ca29a237ef
scanner/scanner.h
scanner/scanner.h
/**
 * @class Scanner
 * @brief Class representing a scanner (lexical analyzer). It takes a file path as input and generates tokens, either lazily or as a QVector.
 */
class Scanner
{
public:
    Scanner(const QString& sourcePath);

    /**
     * @brief Accessor to the vecotr of tokens parsed at construction time.
     * @return const reference to the vector of tokens.
     */
    const QVector<Token>& tokens() const {return _tokens;}

private:
    QChar peek() const; //Current character
    QChar next() const; //Read next character
    void advance();

    Token nextToken();

    //Utility functions for nextToken().
    Token parseAlphaNum(); //Parse an expression that starts with a letter : identifiers, reserved words, bool literals
    Token parseStringLiteral();
    Token parseCharLiteral();
    Token parseNumberLiteral();

    void skipComment();

    QString fileContent;
    QVector<Token> _tokens;
    int currentChar = 0;
    int currentLine = 1;
    int currentRow = 1;
};

#endif // SCANNER_H
/**
 * @class Scanner
 * @brief Class representing a scanner (lexical analyzer). It takes a file path as input and generates tokens, either lazily or as a QVector.
 */
class Scanner
{
public:
    Scanner(const QString& sourcePath);
    Scanner() = delete;
    Scanner(const Scanner& src) = delete;
    Scanner(Scanner &&src) = delete;
    Scanner& operator= (const Scanner& src) = delete;
    Scanner& operator= (Scanner&& src) = delete;

    /**
     * @brief Accessor to the vecotr of tokens parsed at construction time.
     * @return const reference to the vector of tokens.
     */
    const QVector<Token>& tokens() const {return _tokens;}

private:
    QChar peek() const; //Current character
    QChar next() const; //Read next character
    void advance();

    Token nextToken();

    //Utility functions for nextToken().
    Token parseAlphaNum(); //Parse an expression that starts with a letter : identifiers, reserved words, bool literals
    Token parseStringLiteral();
    Token parseCharLiteral();
    Token parseNumberLiteral();

    void skipComment();

    QString fileContent;
    QVector<Token> _tokens;
    int currentChar = 0;
    int currentLine = 1;
    int currentRow = 1;
};

#endif // SCANNER_H
Set copy/move constructors to deleted
Set copy/move constructors to deleted
C
mit
bisthebis/Boboscript,bisthebis/Boboscript
c
## Code Before: /** * @class Scanner * @brief Class representing a scanner (lexical analyzer). It takes a file path as input and generates tokens, either lazily or as a QVector. */ class Scanner { public: Scanner(const QString& sourcePath); /** * @brief Accessor to the vecotr of tokens parsed at construction time. * @return const reference to the vector of tokens. */ const QVector<Token>& tokens() const {return _tokens;} private: QChar peek() const; //Current character QChar next() const; //Read next character void advance(); Token nextToken(); //Utility functions for nextToken(). Token parseAlphaNum(); //Parse an expression that starts with a letter : identifiers, reserved words, bool literals Token parseStringLiteral(); Token parseCharLiteral(); Token parseNumberLiteral(); void skipComment(); QString fileContent; QVector<Token> _tokens; int currentChar = 0; int currentLine = 1; int currentRow = 1; }; #endif // SCANNER_H ## Instruction: Set copy/move constructors to deleted ## Code After: /** * @class Scanner * @brief Class representing a scanner (lexical analyzer). It takes a file path as input and generates tokens, either lazily or as a QVector. */ class Scanner { public: Scanner(const QString& sourcePath); Scanner() = delete; Scanner(const Scanner& src) = delete; Scanner(Scanner &&src) = delete; Scanner& operator= (const Scanner& src) = delete; Scanner& operator= (Scanner&& src) = delete; /** * @brief Accessor to the vecotr of tokens parsed at construction time. * @return const reference to the vector of tokens. */ const QVector<Token>& tokens() const {return _tokens;} private: QChar peek() const; //Current character QChar next() const; //Read next character void advance(); Token nextToken(); //Utility functions for nextToken(). Token parseAlphaNum(); //Parse an expression that starts with a letter : identifiers, reserved words, bool literals Token parseStringLiteral(); Token parseCharLiteral(); Token parseNumberLiteral(); void skipComment(); QString fileContent; QVector<Token> _tokens; int currentChar = 0; int currentLine = 1; int currentRow = 1; }; #endif // SCANNER_H
// ... existing code ... { public: Scanner(const QString& sourcePath); Scanner() = delete; Scanner(const Scanner& src) = delete; Scanner(Scanner &&src) = delete; Scanner& operator= (const Scanner& src) = delete; Scanner& operator= (Scanner&& src) = delete; /** * @brief Accessor to the vecotr of tokens parsed at construction time. // ... rest of the code ...
bb190d54324e15ed6ce99845047228eaef5e57cb
test_core.py
test_core.py
from ookoobah import core
from ookoobah import utils

game = core.Game()
utils.populate_grid_from_string(game.grid, """
######
#>..\#
#.#..#
#....#
#.\./#
######
""")

game.start()
for n in range(10):
    print utils.dump_grid_to_string(game.grid, game.ball)
    game.step()
from ookoobah import core
from ookoobah import utils

game = core.Game()
utils.populate_grid_from_string(game.grid, """
######
#>..\#
#.#..#
#....#
#.\./#
######
""")

game.start()
print "hit <enter> to render next; ^C to abort"
while True:
    print utils.dump_grid_to_string(game.grid, game.ball)
    game.step()
    raw_input()
Make the core dumper interactive
test: Make the core dumper interactive
Python
mit
vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah
python
## Code Before: from ookoobah import core from ookoobah import utils game = core.Game() utils.populate_grid_from_string(game.grid, """ ###### #>..\# #.#..# #....# #.\./# ###### """) game.start() for n in range(10): print utils.dump_grid_to_string(game.grid, game.ball) game.step() ## Instruction: test: Make the core dumper interactive ## Code After: from ookoobah import core from ookoobah import utils game = core.Game() utils.populate_grid_from_string(game.grid, """ ###### #>..\# #.#..# #....# #.\./# ###### """) game.start() print "hit <enter> to render next; ^C to abort" while True: print utils.dump_grid_to_string(game.grid, game.ball) game.step() raw_input()
... """) game.start() print "hit <enter> to render next; ^C to abort" while True: print utils.dump_grid_to_string(game.grid, game.ball) game.step() raw_input() ...
3d2f9087e62006f8a5f19476ae23324a4cfa7793
regex.py
regex.py
import re import sys f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r") print ("open operation complete") fd = f.read() s = '' fd = pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))') for e in re.findall(pattern, fd): s += ' ' s += e[1] s = re.sub('-', ' ', s) s = re.sub(r'\,', ' ', s) s = re.sub(r'\.', ' ', s) s = re.sub('\'', '', s) s = re.sub(r'\;', ' ', s) s = re.sub('s', ' ', s) s = re.sub(r'\(.*?\)', ' ', s) s = re.sub(r'(\[.*?\])', ' ', s) f.close() o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w") o.write(s) o.close()
import re import sys f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r") print ("open operation complete") fd = f.read() s = '' fd = re.sub(r'\&lt.*?\&gt\;', ' ', fd) pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))') for e in re.findall(pattern, fd): s += ' ' s += e[1] s = re.sub('-', ' ', s) s = re.sub(r'\,', ' ', s) s = re.sub(r'\.', ' ', s) s = re.sub('\'', '', s) s = re.sub(r'\;', ' ', s) s = re.sub('s', ' ', s) s = re.sub(r'\(.*?\)', ' ', s) s = re.sub(r'(\[.*?\])', ' ', s) f.close() o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w") o.write(s) o.close()
Update of work over prior couple weeks.
Update of work over prior couple weeks.
Python
mit
jnicolls/meTypeset-Test,jnicolls/Joseph
python
## Code Before: import re import sys f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r") print ("open operation complete") fd = f.read() s = '' fd = pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))') for e in re.findall(pattern, fd): s += ' ' s += e[1] s = re.sub('-', ' ', s) s = re.sub(r'\,', ' ', s) s = re.sub(r'\.', ' ', s) s = re.sub('\'', '', s) s = re.sub(r'\;', ' ', s) s = re.sub('s', ' ', s) s = re.sub(r'\(.*?\)', ' ', s) s = re.sub(r'(\[.*?\])', ' ', s) f.close() o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w") o.write(s) o.close() ## Instruction: Update of work over prior couple weeks. ## Code After: import re import sys f = open ('/var/local/meTypesetTests/tests/testOutput/'+sys.argv[1] +'/nlm/out.xml', "r") print ("open operation complete") fd = f.read() s = '' fd = re.sub(r'\&lt.*?\&gt\;', ' ', fd) pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))') for e in re.findall(pattern, fd): s += ' ' s += e[1] s = re.sub('-', ' ', s) s = re.sub(r'\,', ' ', s) s = re.sub(r'\.', ' ', s) s = re.sub('\'', '', s) s = re.sub(r'\;', ' ', s) s = re.sub('s', ' ', s) s = re.sub(r'\(.*?\)', ' ', s) s = re.sub(r'(\[.*?\])', ' ', s) f.close() o = open ( '/var/local/meTypesetTests/tests/regexOutput/'+sys.argv[1], "w") o.write(s) o.close()
// ... existing code ... fd = f.read() s = '' fd = re.sub(r'\&lt.*?\&gt\;', ' ', fd) pattern = re.compile(r'(?:(&#\d*|>))(.*?)(?=(&#\d*|<))') for e in re.findall(pattern, fd): s += ' ' // ... rest of the code ...
54cfb9864256b27b9f4cd411f170cc12d47727e5
appengine/components/components/machine_provider/dimensions.py
appengine/components/components/machine_provider/dimensions.py
"""Dimensions for the Machine Provider.""" from protorpc import messages class Backend(messages.Enum): """Lists valid backends.""" DUMMY = 0 GCE = 1 class OSFamily(messages.Enum): """Lists valid OS families.""" LINUX = 1 OSX = 2 WINDOWS = 3 class Dimensions(messages.Message): """Represents the dimensions of a machine.""" # The operating system family of this machine. os_family = messages.EnumField(OSFamily, 1) # The backend which should be used to spin up this machine. This should # generally be left unspecified so the Machine Provider selects the backend # on its own. backend = messages.EnumField(Backend, 2) # The hostname of this machine. hostname = messages.StringField(3) # The number of CPUs available to this machine. num_cpus = messages.IntegerField(4) # The amount of memory available to this machine. memory_gb = messages.FloatField(5) # The disk space available to this machine. disk_gb = messages.IntegerField(6)
"""Dimensions for the Machine Provider.""" from protorpc import messages class Backend(messages.Enum): """Lists valid backends.""" DUMMY = 0 GCE = 1 VSPHERE = 2 class OSFamily(messages.Enum): """Lists valid OS families.""" LINUX = 1 OSX = 2 WINDOWS = 3 class Dimensions(messages.Message): """Represents the dimensions of a machine.""" # The operating system family of this machine. os_family = messages.EnumField(OSFamily, 1) # The backend which should be used to spin up this machine. This should # generally be left unspecified so the Machine Provider selects the backend # on its own. backend = messages.EnumField(Backend, 2) # The hostname of this machine. hostname = messages.StringField(3) # The number of CPUs available to this machine. num_cpus = messages.IntegerField(4) # The amount of memory available to this machine. memory_gb = messages.FloatField(5) # The disk space available to this machine. disk_gb = messages.IntegerField(6)
Add enum field for vSphere backend
Add enum field for vSphere backend Review-Url: https://codereview.chromium.org/1997903002
Python
apache-2.0
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
python
## Code Before: """Dimensions for the Machine Provider.""" from protorpc import messages class Backend(messages.Enum): """Lists valid backends.""" DUMMY = 0 GCE = 1 class OSFamily(messages.Enum): """Lists valid OS families.""" LINUX = 1 OSX = 2 WINDOWS = 3 class Dimensions(messages.Message): """Represents the dimensions of a machine.""" # The operating system family of this machine. os_family = messages.EnumField(OSFamily, 1) # The backend which should be used to spin up this machine. This should # generally be left unspecified so the Machine Provider selects the backend # on its own. backend = messages.EnumField(Backend, 2) # The hostname of this machine. hostname = messages.StringField(3) # The number of CPUs available to this machine. num_cpus = messages.IntegerField(4) # The amount of memory available to this machine. memory_gb = messages.FloatField(5) # The disk space available to this machine. disk_gb = messages.IntegerField(6) ## Instruction: Add enum field for vSphere backend Review-Url: https://codereview.chromium.org/1997903002 ## Code After: """Dimensions for the Machine Provider.""" from protorpc import messages class Backend(messages.Enum): """Lists valid backends.""" DUMMY = 0 GCE = 1 VSPHERE = 2 class OSFamily(messages.Enum): """Lists valid OS families.""" LINUX = 1 OSX = 2 WINDOWS = 3 class Dimensions(messages.Message): """Represents the dimensions of a machine.""" # The operating system family of this machine. os_family = messages.EnumField(OSFamily, 1) # The backend which should be used to spin up this machine. This should # generally be left unspecified so the Machine Provider selects the backend # on its own. backend = messages.EnumField(Backend, 2) # The hostname of this machine. hostname = messages.StringField(3) # The number of CPUs available to this machine. num_cpus = messages.IntegerField(4) # The amount of memory available to this machine. memory_gb = messages.FloatField(5) # The disk space available to this machine. disk_gb = messages.IntegerField(6)
... """Lists valid backends.""" DUMMY = 0 GCE = 1 VSPHERE = 2 class OSFamily(messages.Enum): ...
34ea5331f8e05dacf356096dfc1b63682fa78654
Wikipedia/Code/BITHockeyManager+WMFExtensions.h
Wikipedia/Code/BITHockeyManager+WMFExtensions.h
@interface BITHockeyManager (WMFExtensions) <BITHockeyManagerDelegate> /** * Configure and startup in one line. * This will call the methods below as part of the configuration process. * This method will use the current bundle id of the app */ - (void)wmf_setupAndStart; /** * Configure the alert to be displayed when a user is prompeted to send a crash report */ - (void)wmf_setupCrashNotificationAlert; @end
@import HockeySDK; @interface BITHockeyManager (WMFExtensions) <BITHockeyManagerDelegate> /** * Configure and startup in one line. * This will call the methods below as part of the configuration process. * This method will use the current bundle id of the app */ - (void)wmf_setupAndStart; /** * Configure the alert to be displayed when a user is prompeted to send a crash report */ - (void)wmf_setupCrashNotificationAlert; @end
Revert "use old import syntax for HockeySDK"
Revert "use old import syntax for HockeySDK" This reverts commit 0babdd70b3ab330f032790521002f2e171fcf3e6.
C
mit
wikimedia/wikipedia-ios,wikimedia/wikipedia-ios,josve05a/wikipedia-ios,julienbodet/wikipedia-ios,wikimedia/apps-ios-wikipedia,wikimedia/apps-ios-wikipedia,montehurd/apps-ios-wikipedia,wikimedia/apps-ios-wikipedia,wikimedia/wikipedia-ios,montehurd/apps-ios-wikipedia,wikimedia/wikipedia-ios,montehurd/apps-ios-wikipedia,josve05a/wikipedia-ios,wikimedia/apps-ios-wikipedia,josve05a/wikipedia-ios,wikimedia/apps-ios-wikipedia,josve05a/wikipedia-ios,wikimedia/apps-ios-wikipedia,wikimedia/wikipedia-ios,wikimedia/wikipedia-ios,julienbodet/wikipedia-ios,josve05a/wikipedia-ios,montehurd/apps-ios-wikipedia,wikimedia/apps-ios-wikipedia,julienbodet/wikipedia-ios,montehurd/apps-ios-wikipedia,josve05a/wikipedia-ios,julienbodet/wikipedia-ios,julienbodet/wikipedia-ios,josve05a/wikipedia-ios,julienbodet/wikipedia-ios,montehurd/apps-ios-wikipedia,montehurd/apps-ios-wikipedia,montehurd/apps-ios-wikipedia,julienbodet/wikipedia-ios,julienbodet/wikipedia-ios,josve05a/wikipedia-ios,wikimedia/wikipedia-ios,wikimedia/apps-ios-wikipedia
c
## Code Before: @interface BITHockeyManager (WMFExtensions) <BITHockeyManagerDelegate> /** * Configure and startup in one line. * This will call the methods below as part of the configuration process. * This method will use the current bundle id of the app */ - (void)wmf_setupAndStart; /** * Configure the alert to be displayed when a user is prompeted to send a crash report */ - (void)wmf_setupCrashNotificationAlert; @end ## Instruction: Revert "use old import syntax for HockeySDK" This reverts commit 0babdd70b3ab330f032790521002f2e171fcf3e6. ## Code After: @import HockeySDK; @interface BITHockeyManager (WMFExtensions) <BITHockeyManagerDelegate> /** * Configure and startup in one line. * This will call the methods below as part of the configuration process. * This method will use the current bundle id of the app */ - (void)wmf_setupAndStart; /** * Configure the alert to be displayed when a user is prompeted to send a crash report */ - (void)wmf_setupCrashNotificationAlert; @end
// ... existing code ... @import HockeySDK; @interface BITHockeyManager (WMFExtensions) <BITHockeyManagerDelegate> // ... rest of the code ...
43c1f230382a3b7ad7776d28840c5305bb919ab9
jujugui/__init__.py
jujugui/__init__.py
from pyramid.config import Configurator


def main(global_config, **settings):
    """Return a Pyramid WSGI application."""
    config = Configurator(settings=settings)
    return make_application(config)


def make_application(config):
    """Set up the routes and return the WSGI application."""
    # We use two separate included app/routes so that we can
    # have the gui parts behind a separate route from the
    # assets when we embed it in e.g. the storefront.
    config.include('jujugui.gui')
    config.include('jujugui.assets')
    return config.make_wsgi_app()
from pyramid.config import Configurator


def main(global_config, **settings):
    """Return a Pyramid WSGI application."""
    config = Configurator(settings=settings)
    return make_application(config)


def make_application(config):
    """Set up the routes and return the WSGI application."""
    # We use two separate included app/routes so that we can
    # have the gui parts behind a separate route from the
    # assets when we embed it in e.g. the storefront.
    # NOTE: kadams54, 2015-08-04: It's very important that assets be listed
    # first; if it isn't, then the jujugui.gui routes override those specified
    # in assets and any asset requests will go to the main app.
    config.include('jujugui.assets')
    config.include('jujugui.gui')
    return config.make_wsgi_app()
Fix load order to fix routes.
Fix load order to fix routes.
Python
agpl-3.0
bac/juju-gui,bac/juju-gui,mitechie/juju-gui,mitechie/juju-gui,mitechie/juju-gui,mitechie/juju-gui,bac/juju-gui,bac/juju-gui
python
## Code Before: from pyramid.config import Configurator def main(global_config, **settings): """Return a Pyramid WSGI application.""" config = Configurator(settings=settings) return make_application(config) def make_application(config): """Set up the routes and return the WSGI application.""" # We use two separate included app/routes so that we can # have the gui parts behind a separate route from the # assets when we embed it in e.g. the storefront. config.include('jujugui.gui') config.include('jujugui.assets') return config.make_wsgi_app() ## Instruction: Fix load order to fix routes. ## Code After: from pyramid.config import Configurator def main(global_config, **settings): """Return a Pyramid WSGI application.""" config = Configurator(settings=settings) return make_application(config) def make_application(config): """Set up the routes and return the WSGI application.""" # We use two separate included app/routes so that we can # have the gui parts behind a separate route from the # assets when we embed it in e.g. the storefront. # NOTE: kadams54, 2015-08-04: It's very important that assets be listed # first; if it isn't, then the jujugui.gui routes override those specified # in assets and any asset requests will go to the main app. config.include('jujugui.assets') config.include('jujugui.gui') return config.make_wsgi_app()
... # We use two separate included app/routes so that we can # have the gui parts behind a separate route from the # assets when we embed it in e.g. the storefront. # NOTE: kadams54, 2015-08-04: It's very important that assets be listed # first; if it isn't, then the jujugui.gui routes override those specified # in assets and any asset requests will go to the main app. config.include('jujugui.assets') config.include('jujugui.gui') return config.make_wsgi_app() ...
fddc1198d54a8a868bd8b97ed7318feeb00f6725
setup.py
setup.py
from setuptools import setup VERSION = '0.2.8' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
from setuptools import setup VERSION = '0.2.9' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
Add keywords and bump to 0.2.9
Add keywords and bump to 0.2.9
Python
mit
filwaitman/jinja2-standalone-compiler
python
## Code Before: from setuptools import setup VERSION = '0.2.8' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, ) ## Instruction: Add keywords and bump to 0.2.9 ## Code After: from setuptools import setup VERSION = '0.2.9' setup( name='jinja2_standalone_compiler', packages=['jinja2_standalone_compiler', ], version=VERSION, author='Filipe Waitman', author_email='[email protected]', install_requires=[x.strip() for x in open('requirements.txt').readlines()], url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Operating System :: OS Independent", ], entry_points="""\ [console_scripts] jinja2_standalone_compiler = jinja2_standalone_compiler:main_command """, )
# ... existing code ... from setuptools import setup VERSION = '0.2.9' setup( # ... modified code ... url='https://github.com/filwaitman/jinja2-standalone-compiler', download_url='https://github.com/filwaitman/jinja2-standalone-compiler/tarball/{}'.format(VERSION), test_suite='tests', keywords=['Jinja2', 'Jinja', 'renderer', 'compiler', 'HTML'], classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", # ... rest of the code ...
4a838a3e1df1f832a013b3e8a18e5474b06d0f9a
easy_bake.py
easy_bake.py
import RPi.GPIO as gpio
import time

#use board numbering on the pi
gpio.setmode(gpio.BOARD)

gpio.setup(40, gpio.OUT)
gpio.setup(38, gpio.OUT)

#true and 1 are the same
gpio.output(40, True)
gpio.output(38, 1)

while True:
    gpio.output(40, True)
    gpio.output(38, False)
    time.sleep(4)
    gpio.output(40, 0)
    gpio.output(38, 1)
import RPi.GPIO as gpio
import time

#use board numbering on the pi
gpio.setmode(gpio.BOARD)

output_pins = [40, 38]
gpio.setup(output_pins, gpio.OUT)

#true and 1 are the same
# gpio.output(40, True)
# gpio.output(38, 1)

while True:
    gpio.output(output_pins, (True, False))
    # gpio.output(40, True)
    # gpio.output(38, False)
    time.sleep(1)
    # gpio.output(40, False)
    # gpio.output(38, True)
    gpio.output(output_pins, (False, True))

gpio.cleanup()
Add in array or tuple of pins
Add in array or tuple of pins
Python
mit
emgreen33/easy_bake,emgreen33/easy_bake
python
## Code Before: import RPi.GPIO as gpio import time #use board numbering on the pi gpio.setmode(gpio.BOARD) gpio.setup(40, gpio.OUT) gpio.setup(38, gpio.OUT) #true and 1 are the same gpio.output(40, True) gpio.output(38, 1) while True: gpio.output(40, True) gpio.output(38, False) time.sleep(4) gpio.output(40, 0) gpio.output(38, 1) ## Instruction: Add in array or tuple of pins ## Code After: import RPi.GPIO as gpio import time #use board numbering on the pi gpio.setmode(gpio.BOARD) output_pins = [40, 38] gpio.setup(output_pins, gpio.OUT) #true and 1 are the same # gpio.output(40, True) # gpio.output(38, 1) while True: gpio.output(output_pins, (True, False)) # gpio.output(40, True) # gpio.output(38, False) time.sleep(1) # gpio.output(40, False) # gpio.output(38, True) gpio.output(output_pins, (False, True)) gpio.cleanup()
# ... existing code ... #use board numbering on the pi gpio.setmode(gpio.BOARD) output_pins = [40, 38] gpio.setup(output_pins, gpio.OUT) #true and 1 are the same # gpio.output(40, True) # gpio.output(38, 1) while True: gpio.output(output_pins, (True, False)) # gpio.output(40, True) # gpio.output(38, False) time.sleep(1) # gpio.output(40, False) # gpio.output(38, True) gpio.output(output_pins, (False, True)) gpio.cleanup() # ... rest of the code ...
a5274f0628bec7a77fc2722ced723c4f35f3fb4b
microcosm_flask/fields/query_string_list.py
microcosm_flask/fields/query_string_list.py
from marshmallow.fields import List, ValidationError class SelfSerializableList(list): def __str__(self): return ",".join(str(item) for item in self) class QueryStringList(List): def _deserialize(self, value, attr, obj): """ _deserialize handles multiple formats of query string parameter lists including: /foo?bars=1,2 /foo?bars[]=1&bars[]2 and returns a list of values """ if value is None: return None try: attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)] attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param) return attribute_params except ValueError: raise ValidationError("Invalid query string list argument")
from marshmallow.fields import List, ValidationError class PrintableList(list): def __str__(self): return ",".join(str(item) for item in self) class QueryStringList(List): def _deserialize(self, value, attr, obj): """ _deserialize handles multiple formats of query string parameter lists including: /foo?bars=1,2 /foo?bars[]=1&bars[]2 and returns a list of values """ if value is None: return None try: attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)] attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param) return attribute_params except ValueError: raise ValidationError("Invalid query string list argument")
Change the name of SelfSerializableList to PrintableList
Change the name of SelfSerializableList to PrintableList
Python
apache-2.0
globality-corp/microcosm-flask,globality-corp/microcosm-flask
python
## Code Before: from marshmallow.fields import List, ValidationError class SelfSerializableList(list): def __str__(self): return ",".join(str(item) for item in self) class QueryStringList(List): def _deserialize(self, value, attr, obj): """ _deserialize handles multiple formats of query string parameter lists including: /foo?bars=1,2 /foo?bars[]=1&bars[]2 and returns a list of values """ if value is None: return None try: attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)] attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param) return attribute_params except ValueError: raise ValidationError("Invalid query string list argument") ## Instruction: Change the name of SelfSerializableList to PrintableList ## Code After: from marshmallow.fields import List, ValidationError class PrintableList(list): def __str__(self): return ",".join(str(item) for item in self) class QueryStringList(List): def _deserialize(self, value, attr, obj): """ _deserialize handles multiple formats of query string parameter lists including: /foo?bars=1,2 /foo?bars[]=1&bars[]2 and returns a list of values """ if value is None: return None try: attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)] attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param) return attribute_params except ValueError: raise ValidationError("Invalid query string list argument")
... from marshmallow.fields import List, ValidationError class PrintableList(list): def __str__(self): return ",".join(str(item) for item in self) ... try: attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)] attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param) return attribute_params except ValueError: ...
2b1e60a9910561de5a71e83d042b845f6be0bc73
__init__.py
__init__.py
from . import platform_specific, input from .graphics import screen from .run_loop import main_run_loop, every platform_specific.fixup_env() def run(): main_run_loop.add_wait_callback(input.check_for_quit_event) main_run_loop.add_after_action_callback(screen.after_loop) main_run_loop.run()
from . import platform_specific, input from .graphics import screen from .run_loop import main_run_loop, every platform_specific.fixup_env() def run(loop=None): if loop is not None: every(seconds=1.0/30)(loop) main_run_loop.add_wait_callback(input.check_for_quit_event) main_run_loop.add_after_action_callback(screen.after_loop) main_run_loop.run()
Allow run argument to avoid @every template
Allow run argument to avoid @every template
Python
bsd-2-clause
furbrain/tingbot-python
python
## Code Before: from . import platform_specific, input from .graphics import screen from .run_loop import main_run_loop, every platform_specific.fixup_env() def run(): main_run_loop.add_wait_callback(input.check_for_quit_event) main_run_loop.add_after_action_callback(screen.after_loop) main_run_loop.run() ## Instruction: Allow run argument to avoid @every template ## Code After: from . import platform_specific, input from .graphics import screen from .run_loop import main_run_loop, every platform_specific.fixup_env() def run(loop=None): if loop is not None: every(seconds=1.0/30)(loop) main_run_loop.add_wait_callback(input.check_for_quit_event) main_run_loop.add_after_action_callback(screen.after_loop) main_run_loop.run()
# ... existing code ... platform_specific.fixup_env() def run(loop=None): if loop is not None: every(seconds=1.0/30)(loop) main_run_loop.add_wait_callback(input.check_for_quit_event) main_run_loop.add_after_action_callback(screen.after_loop) # ... rest of the code ...
92febbffb91943f13cfac8c00e55103b20645b70
plex/objects/library/container.py
plex/objects/library/container.py
from plex.objects.core.base import Property from plex.objects.container import Container from plex.objects.library.section import Section class MediaContainer(Container): section = Property(resolver=lambda: MediaContainer.construct_section) title1 = Property title2 = Property identifier = Property art = Property thumb = Property view_group = Property('viewGroup') view_mode = Property('viewMode', int) media_tag_prefix = Property('mediaTagPrefix') media_tag_version = Property('mediaTagVersion') no_cache = Property('nocache', bool) allow_sync = Property('allowSync', bool) mixed_parents = Property('mixedParents', bool) @staticmethod def construct_section(client, node): attribute_map = { 'key': 'librarySectionID', 'uuid': 'librarySectionUUID', 'title': 'librarySectionTitle' } return Section.construct(client, node, attribute_map, child=True)
from plex.objects.core.base import Property from plex.objects.container import Container from plex.objects.library.section import Section class MediaContainer(Container): section = Property(resolver=lambda: MediaContainer.construct_section) title1 = Property title2 = Property identifier = Property art = Property thumb = Property view_group = Property('viewGroup') view_mode = Property('viewMode', int) media_tag_prefix = Property('mediaTagPrefix') media_tag_version = Property('mediaTagVersion') no_cache = Property('nocache', bool) allow_sync = Property('allowSync', bool) mixed_parents = Property('mixedParents', bool) @staticmethod def construct_section(client, node): attribute_map = { 'key': 'librarySectionID', 'uuid': 'librarySectionUUID', 'title': 'librarySectionTitle' } return Section.construct(client, node, attribute_map, child=True) def __iter__(self): for item in super(MediaContainer, self).__iter__(): item.section = self.section yield item
Update [MediaContainer] children with the correct `section` object
Update [MediaContainer] children with the correct `section` object
Python
mit
fuzeman/plex.py
python
## Code Before: from plex.objects.core.base import Property from plex.objects.container import Container from plex.objects.library.section import Section class MediaContainer(Container): section = Property(resolver=lambda: MediaContainer.construct_section) title1 = Property title2 = Property identifier = Property art = Property thumb = Property view_group = Property('viewGroup') view_mode = Property('viewMode', int) media_tag_prefix = Property('mediaTagPrefix') media_tag_version = Property('mediaTagVersion') no_cache = Property('nocache', bool) allow_sync = Property('allowSync', bool) mixed_parents = Property('mixedParents', bool) @staticmethod def construct_section(client, node): attribute_map = { 'key': 'librarySectionID', 'uuid': 'librarySectionUUID', 'title': 'librarySectionTitle' } return Section.construct(client, node, attribute_map, child=True) ## Instruction: Update [MediaContainer] children with the correct `section` object ## Code After: from plex.objects.core.base import Property from plex.objects.container import Container from plex.objects.library.section import Section class MediaContainer(Container): section = Property(resolver=lambda: MediaContainer.construct_section) title1 = Property title2 = Property identifier = Property art = Property thumb = Property view_group = Property('viewGroup') view_mode = Property('viewMode', int) media_tag_prefix = Property('mediaTagPrefix') media_tag_version = Property('mediaTagVersion') no_cache = Property('nocache', bool) allow_sync = Property('allowSync', bool) mixed_parents = Property('mixedParents', bool) @staticmethod def construct_section(client, node): attribute_map = { 'key': 'librarySectionID', 'uuid': 'librarySectionUUID', 'title': 'librarySectionTitle' } return Section.construct(client, node, attribute_map, child=True) def __iter__(self): for item in super(MediaContainer, self).__iter__(): item.section = self.section yield item
// ... existing code ... } return Section.construct(client, node, attribute_map, child=True) def __iter__(self): for item in super(MediaContainer, self).__iter__(): item.section = self.section yield item // ... rest of the code ...
5d2a4ac0e48d404a16b81d2f290be5ec13bdf8f1
logintokens/forms.py
logintokens/forms.py
from django import forms from django.contrib.auth import get_user_model from django.core.mail import EmailMultiAlternatives from django.contrib.sites.shortcuts import get_current_site from django.urls import reverse_lazy from logintokens.tokens import default_token_generator USER = get_user_model() class TokenLoginForm(forms.Form): email = forms.EmailField(label="Email", max_length=254) def generate_login_link(self, email, request): protocol = 'https' if request.is_secure() else 'http' domain = get_current_site(request).domain url = reverse_lazy('token_login') token = default_token_generator.make_token(email) return '{}://{}{}?token={}'.format(protocol, domain, url, token) def save(self, request): """Generate a login token and send it to the email from the form. """ email = self.cleaned_data['email'] body = 'To complete the login process, simply click on this link: {}' login_link = self.generate_login_link(email, request) email_message = EmailMultiAlternatives( 'Your login link for ANIAuth', body.format(login_link), to=[email] ) email_message.send()
from django import forms from django.contrib.auth import get_user_model from django.contrib.auth.forms import UsernameField from django.core.mail import EmailMultiAlternatives from django.contrib.sites.shortcuts import get_current_site from django.urls import reverse_lazy from logintokens.tokens import default_token_generator USER = get_user_model() class TokenLoginForm(forms.Form): email = UsernameField( max_length=254, widget=forms.TextInput(attrs={'autofocus': True}), ) def generate_login_link(self, username, request): protocol = 'https' if request.is_secure() else 'http' domain = get_current_site(request).domain url = reverse_lazy('token_login') token = default_token_generator.make_token(username) return '{}://{}{}?token={}'.format(protocol, domain, url, token) def save(self, request): """Generate a login token and send it to the email from the form. """ username = self.cleaned_data['email'] try: user = USER._default_manager.get_by_natural_key(username) email = getattr(user, USER.EMAIL_FIELD) except USER.DoesNotExist: email = username body = 'To complete the login process, simply click on this link: {}' login_link = self.generate_login_link(username, request) email_message = EmailMultiAlternatives( 'Your login link for ANIAuth', body.format(login_link), to=[email] ) email_message.send()
Update form to pass new test
Update form to pass new test
Python
mit
randomic/aniauth-tdd,randomic/aniauth-tdd
python
## Code Before: from django import forms from django.contrib.auth import get_user_model from django.core.mail import EmailMultiAlternatives from django.contrib.sites.shortcuts import get_current_site from django.urls import reverse_lazy from logintokens.tokens import default_token_generator USER = get_user_model() class TokenLoginForm(forms.Form): email = forms.EmailField(label="Email", max_length=254) def generate_login_link(self, email, request): protocol = 'https' if request.is_secure() else 'http' domain = get_current_site(request).domain url = reverse_lazy('token_login') token = default_token_generator.make_token(email) return '{}://{}{}?token={}'.format(protocol, domain, url, token) def save(self, request): """Generate a login token and send it to the email from the form. """ email = self.cleaned_data['email'] body = 'To complete the login process, simply click on this link: {}' login_link = self.generate_login_link(email, request) email_message = EmailMultiAlternatives( 'Your login link for ANIAuth', body.format(login_link), to=[email] ) email_message.send() ## Instruction: Update form to pass new test ## Code After: from django import forms from django.contrib.auth import get_user_model from django.contrib.auth.forms import UsernameField from django.core.mail import EmailMultiAlternatives from django.contrib.sites.shortcuts import get_current_site from django.urls import reverse_lazy from logintokens.tokens import default_token_generator USER = get_user_model() class TokenLoginForm(forms.Form): email = UsernameField( max_length=254, widget=forms.TextInput(attrs={'autofocus': True}), ) def generate_login_link(self, username, request): protocol = 'https' if request.is_secure() else 'http' domain = get_current_site(request).domain url = reverse_lazy('token_login') token = default_token_generator.make_token(username) return '{}://{}{}?token={}'.format(protocol, domain, url, token) def save(self, request): """Generate a login token and send it to the email from the form. """ username = self.cleaned_data['email'] try: user = USER._default_manager.get_by_natural_key(username) email = getattr(user, USER.EMAIL_FIELD) except USER.DoesNotExist: email = username body = 'To complete the login process, simply click on this link: {}' login_link = self.generate_login_link(username, request) email_message = EmailMultiAlternatives( 'Your login link for ANIAuth', body.format(login_link), to=[email] ) email_message.send()
// ... existing code ... from django import forms from django.contrib.auth import get_user_model from django.contrib.auth.forms import UsernameField from django.core.mail import EmailMultiAlternatives from django.contrib.sites.shortcuts import get_current_site from django.urls import reverse_lazy // ... modified code ... class TokenLoginForm(forms.Form): email = UsernameField( max_length=254, widget=forms.TextInput(attrs={'autofocus': True}), ) def generate_login_link(self, username, request): protocol = 'https' if request.is_secure() else 'http' domain = get_current_site(request).domain url = reverse_lazy('token_login') token = default_token_generator.make_token(username) return '{}://{}{}?token={}'.format(protocol, domain, url, token) def save(self, request): ... """Generate a login token and send it to the email from the form. """ username = self.cleaned_data['email'] try: user = USER._default_manager.get_by_natural_key(username) email = getattr(user, USER.EMAIL_FIELD) except USER.DoesNotExist: email = username body = 'To complete the login process, simply click on this link: {}' login_link = self.generate_login_link(username, request) email_message = EmailMultiAlternatives( 'Your login link for ANIAuth', // ... rest of the code ...
57a937839dddf814fc18b66c6143355935706a28
src/java/com/threerings/whirled/client/SceneService.java
src/java/com/threerings/whirled/client/SceneService.java
// // $Id: SceneService.java,v 1.6 2002/04/15 16:28:03 shaper Exp $ package com.threerings.whirled.client; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationDirector; import com.threerings.whirled.Log; import com.threerings.whirled.data.SceneCodes; /** * The scene service class provides the client interface to the scene * related invocation services (e.g. moving from scene to scene). */ public class SceneService implements SceneCodes { /** * Requests that that this client's body be moved to the specified * scene. * * @param sceneId the scene id to which we want to move. * @param sceneVers the version number of the scene object that we * have in our local repository. */ public static void moveTo (Client client, int sceneId, int sceneVers, SceneDirector rsptarget) { InvocationDirector invdir = client.getInvocationDirector(); Object[] args = new Object[] { new Integer(sceneId), new Integer(sceneVers) }; invdir.invoke(MODULE_NAME, MOVE_TO_REQUEST, args, rsptarget); Log.info("Sent moveTo request [scene=" + sceneId + ", version=" + sceneVers + "]."); } }
// // $Id: SceneService.java,v 1.7 2002/04/26 00:07:05 ray Exp $ package com.threerings.whirled.client; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationDirector; import com.threerings.whirled.Log; import com.threerings.whirled.data.SceneCodes; /** * The scene service class provides the client interface to the scene * related invocation services (e.g. moving from scene to scene). */ public class SceneService implements SceneCodes { /** * Requests that that this client's body be moved to the specified * scene. * * @param sceneId the scene id to which we want to move. * @param sceneVers the version number of the scene object that we * have in our local repository. */ public static void moveTo (Client client, int sceneId, int sceneVers, Object rsptarget) { InvocationDirector invdir = client.getInvocationDirector(); Object[] args = new Object[] { new Integer(sceneId), new Integer(sceneVers) }; invdir.invoke(MODULE_NAME, MOVE_TO_REQUEST, args, rsptarget); Log.info("Sent moveTo request [scene=" + sceneId + ", version=" + sceneVers + "]."); } }
Allow any object to be used as the response target for a moveTo request.
Allow any object to be used as the response target for a moveTo request. git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@1293 542714f4-19e9-0310-aa3c-eee0fc999fb1
Java
lgpl-2.1
threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya
java
## Code Before: // // $Id: SceneService.java,v 1.6 2002/04/15 16:28:03 shaper Exp $ package com.threerings.whirled.client; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationDirector; import com.threerings.whirled.Log; import com.threerings.whirled.data.SceneCodes; /** * The scene service class provides the client interface to the scene * related invocation services (e.g. moving from scene to scene). */ public class SceneService implements SceneCodes { /** * Requests that that this client's body be moved to the specified * scene. * * @param sceneId the scene id to which we want to move. * @param sceneVers the version number of the scene object that we * have in our local repository. */ public static void moveTo (Client client, int sceneId, int sceneVers, SceneDirector rsptarget) { InvocationDirector invdir = client.getInvocationDirector(); Object[] args = new Object[] { new Integer(sceneId), new Integer(sceneVers) }; invdir.invoke(MODULE_NAME, MOVE_TO_REQUEST, args, rsptarget); Log.info("Sent moveTo request [scene=" + sceneId + ", version=" + sceneVers + "]."); } } ## Instruction: Allow any object to be used as the response target for a moveTo request. git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@1293 542714f4-19e9-0310-aa3c-eee0fc999fb1 ## Code After: // // $Id: SceneService.java,v 1.7 2002/04/26 00:07:05 ray Exp $ package com.threerings.whirled.client; import com.threerings.presents.client.Client; import com.threerings.presents.client.InvocationDirector; import com.threerings.whirled.Log; import com.threerings.whirled.data.SceneCodes; /** * The scene service class provides the client interface to the scene * related invocation services (e.g. moving from scene to scene). */ public class SceneService implements SceneCodes { /** * Requests that that this client's body be moved to the specified * scene. * * @param sceneId the scene id to which we want to move. * @param sceneVers the version number of the scene object that we * have in our local repository. */ public static void moveTo (Client client, int sceneId, int sceneVers, Object rsptarget) { InvocationDirector invdir = client.getInvocationDirector(); Object[] args = new Object[] { new Integer(sceneId), new Integer(sceneVers) }; invdir.invoke(MODULE_NAME, MOVE_TO_REQUEST, args, rsptarget); Log.info("Sent moveTo request [scene=" + sceneId + ", version=" + sceneVers + "]."); } }
# ... existing code ... // // $Id: SceneService.java,v 1.7 2002/04/26 00:07:05 ray Exp $ package com.threerings.whirled.client; # ... modified code ... * have in our local repository. */ public static void moveTo (Client client, int sceneId, int sceneVers, Object rsptarget) { InvocationDirector invdir = client.getInvocationDirector(); Object[] args = new Object[] { # ... rest of the code ...
e8b8c257c71b6c02fa691557618261e6832fba94
faker/providers/ssn/uk_UA/__init__.py
faker/providers/ssn/uk_UA/__init__.py
from __future__ import unicode_literals from .. import Provider as SsnProvider # Note: as there no SSN in Ukraine # we get value added tax identification number (VATIN) here. # It is also called "Ідентифікаційний номер платника податків" (in ukrainian). # It contains only digits and length if 12. class Provider(SsnProvider): ssn_formats = ("############",)
from __future__ import unicode_literals from datetime import date from .. import Provider as SsnProvider from faker.providers.date_time import Provider as DateTimeProvider class Provider(SsnProvider): @classmethod def ssn(cls): """ Ukrainian "Реєстраційний номер облікової картки платника податків" also known as "Ідентифікаційний номер фізичної особи". """ digits = [] # Number of days between 1899-12-31 and a birth date for digit in str((DateTimeProvider.date_object() - date(1899, 12, 31)).days): digits.append(int(digit)) # Person's sequence number for _ in range(4): digits.append(cls.random_int(0, 9)) checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 + digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 + digits[8]*7) # Remainder of a checksum divided by 11 or 1 if it equals to 10 digits.append(checksum % 11 % 10) return ''.join(str(digit) for digit in digits)
Make the Ukrainian SSN provider realer
Make the Ukrainian SSN provider realer
Python
mit
joke2k/faker,danhuss/faker,trtd/faker,joke2k/faker
python
## Code Before: from __future__ import unicode_literals from .. import Provider as SsnProvider # Note: as there no SSN in Ukraine # we get value added tax identification number (VATIN) here. # It is also called "Ідентифікаційний номер платника податків" (in ukrainian). # It contains only digits and length if 12. class Provider(SsnProvider): ssn_formats = ("############",) ## Instruction: Make the Ukrainian SSN provider realer ## Code After: from __future__ import unicode_literals from datetime import date from .. import Provider as SsnProvider from faker.providers.date_time import Provider as DateTimeProvider class Provider(SsnProvider): @classmethod def ssn(cls): """ Ukrainian "Реєстраційний номер облікової картки платника податків" also known as "Ідентифікаційний номер фізичної особи". """ digits = [] # Number of days between 1899-12-31 and a birth date for digit in str((DateTimeProvider.date_object() - date(1899, 12, 31)).days): digits.append(int(digit)) # Person's sequence number for _ in range(4): digits.append(cls.random_int(0, 9)) checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 + digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 + digits[8]*7) # Remainder of a checksum divided by 11 or 1 if it equals to 10 digits.append(checksum % 11 % 10) return ''.join(str(digit) for digit in digits)
# ... existing code ... from __future__ import unicode_literals from datetime import date from .. import Provider as SsnProvider from faker.providers.date_time import Provider as DateTimeProvider class Provider(SsnProvider): @classmethod def ssn(cls): """ Ukrainian "Реєстраційний номер облікової картки платника податків" also known as "Ідентифікаційний номер фізичної особи". """ digits = [] # Number of days between 1899-12-31 and a birth date for digit in str((DateTimeProvider.date_object() - date(1899, 12, 31)).days): digits.append(int(digit)) # Person's sequence number for _ in range(4): digits.append(cls.random_int(0, 9)) checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 + digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 + digits[8]*7) # Remainder of a checksum divided by 11 or 1 if it equals to 10 digits.append(checksum % 11 % 10) return ''.join(str(digit) for digit in digits) # ... rest of the code ...
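The check-digit arithmetic in the new provider is easy to verify in isolation: the weights `(-1, 5, 7, 9, 4, 6, 10, 5, 7)` and the `% 11 % 10` rule below are copied from the record, while the birth date and 4-digit sequence number are made-up example values (the real provider draws them randomly).

```python
# Standalone sketch of the provider's check-digit rule.
from datetime import date

WEIGHTS = (-1, 5, 7, 9, 4, 6, 10, 5, 7)


def check_digit(first_nine):
    """Return the 10th digit for a 9-digit prefix, as computed above."""
    checksum = sum(w * int(d) for w, d in zip(WEIGHTS, first_nine))
    return checksum % 11 % 10


# Same construction as the provider: days since 1899-12-31, then a 4-digit
# sequence number (fixed here instead of random), then the check digit.
prefix = str((date(1985, 6, 1) - date(1899, 12, 31)).days) + "0042"
ssn = prefix + str(check_digit(prefix))
print(ssn, len(ssn))  # a 10-digit identifier ending in its check digit
```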
e652951880fc5449334d0c6159df0cf04b478c1e
src/test/com/faveset/khttp/RequestHeaderHandlerTest.java
src/test/com/faveset/khttp/RequestHeaderHandlerTest.java
// Copyright 2014, Kevin Ko <[email protected]> package com.faveset.khttp; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.nio.ByteBuffer; @RunWith(JUnit4.class) public class RequestHeaderHandlerTest { @Test public void testSimple() throws InvalidRequestException { ByteBuffer buf = Helper.makeByteBuffer("hello: world\n"); HandlerState state = new HandlerState(); RequestHeaderHandler handler = new RequestHeaderHandler(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("hello").size()); assertEquals("world", state.getRequest().getHeaderFirst("hello")); buf.clear(); buf.put(Helper.makeByteBuffer("\n")); buf.flip(); assertTrue(handler.handleState(null, buf, state)); } }
// Copyright 2014, Kevin Ko <[email protected]> package com.faveset.khttp; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.nio.ByteBuffer; @RunWith(JUnit4.class) public class RequestHeaderHandlerTest { @Test public void testSimple() throws InvalidRequestException { ByteBuffer buf = Helper.makeByteBuffer("hello: world\n"); HandlerState state = new HandlerState(); RequestHeaderHandler handler = new RequestHeaderHandler(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("hello").size()); assertEquals("world", state.getRequest().getHeaderFirst("hello")); buf.clear(); buf.put(Helper.makeByteBuffer("foo: bar\r\n")); buf.flip(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("foo").size()); assertEquals("bar", state.getRequest().getHeaderFirst("foo")); buf.clear(); buf.put(Helper.makeByteBuffer("\n")); buf.flip(); assertTrue(handler.handleState(null, buf, state)); } }
Expand simple test with another header.
Expand simple test with another header.
Java
bsd-3-clause
kevinko/mahttp,kevinko/mahttp
java
## Code Before: // Copyright 2014, Kevin Ko <[email protected]> package com.faveset.khttp; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.nio.ByteBuffer; @RunWith(JUnit4.class) public class RequestHeaderHandlerTest { @Test public void testSimple() throws InvalidRequestException { ByteBuffer buf = Helper.makeByteBuffer("hello: world\n"); HandlerState state = new HandlerState(); RequestHeaderHandler handler = new RequestHeaderHandler(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("hello").size()); assertEquals("world", state.getRequest().getHeaderFirst("hello")); buf.clear(); buf.put(Helper.makeByteBuffer("\n")); buf.flip(); assertTrue(handler.handleState(null, buf, state)); } } ## Instruction: Expand simple test with another header. ## Code After: // Copyright 2014, Kevin Ko <[email protected]> package com.faveset.khttp; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.Ignore; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.nio.ByteBuffer; @RunWith(JUnit4.class) public class RequestHeaderHandlerTest { @Test public void testSimple() throws InvalidRequestException { ByteBuffer buf = Helper.makeByteBuffer("hello: world\n"); HandlerState state = new HandlerState(); RequestHeaderHandler handler = new RequestHeaderHandler(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("hello").size()); assertEquals("world", state.getRequest().getHeaderFirst("hello")); buf.clear(); buf.put(Helper.makeByteBuffer("foo: bar\r\n")); buf.flip(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("foo").size()); assertEquals("bar", state.getRequest().getHeaderFirst("foo")); buf.clear(); buf.put(Helper.makeByteBuffer("\n")); buf.flip(); assertTrue(handler.handleState(null, buf, state)); } }
... assertEquals("world", state.getRequest().getHeaderFirst("hello")); buf.clear(); buf.put(Helper.makeByteBuffer("foo: bar\r\n")); buf.flip(); assertFalse(handler.handleState(null, buf, state)); assertEquals(1, state.getRequest().getHeader("foo").size()); assertEquals("bar", state.getRequest().getHeaderFirst("foo")); buf.clear(); buf.put(Helper.makeByteBuffer("\n")); buf.flip(); assertTrue(handler.handleState(null, buf, state)); ...
53d09ddacc92a52219a3cd18bba606840b870fcd
vumi_http_proxy/test/test_servicemaker.py
vumi_http_proxy/test/test_servicemaker.py
from vumi_http_proxy.servicemaker import Options, ProxyWorkerServiceMaker from vumi_http_proxy import http_proxy from twisted.trial import unittest class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080)
from vumi_http_proxy.servicemaker import ( Options, ProxyWorkerServiceMaker, client) from vumi_http_proxy import http_proxy from twisted.trial import unittest from vumi_http_proxy.test import helpers class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) self.patch(client, 'createResolver', lambda: helpers.TestResolver()) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080)
Patch out DNS resolver in makeService tests.
Patch out DNS resolver in makeService tests.
Python
bsd-3-clause
praekelt/vumi-http-proxy,praekelt/vumi-http-proxy
python
## Code Before: from vumi_http_proxy.servicemaker import Options, ProxyWorkerServiceMaker from vumi_http_proxy import http_proxy from twisted.trial import unittest class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080) ## Instruction: Patch out DNS resolver in makeService tests. ## Code After: from vumi_http_proxy.servicemaker import ( Options, ProxyWorkerServiceMaker, client) from vumi_http_proxy import http_proxy from twisted.trial import unittest from vumi_http_proxy.test import helpers class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) self.patch(client, 'createResolver', lambda: helpers.TestResolver()) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080)
# ... existing code ... from vumi_http_proxy.servicemaker import ( Options, ProxyWorkerServiceMaker, client) from vumi_http_proxy import http_proxy from twisted.trial import unittest from vumi_http_proxy.test import helpers class TestOptions(unittest.TestCase): # ... modified code ... def test_makeService(self): options = Options() options.parseOptions([]) self.patch(client, 'createResolver', lambda: helpers.TestResolver()) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) # ... rest of the code ...
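The shape of the fix above is worth isolating: instead of reading `self.project_id` on whatever recordset the confirmation runs on, the cleanup now iterates `self.filtered(lambda x: x.project_id)`, so orders that only contain a nonrecurring service (and therefore carry no analytic project) are simply skipped. The toy classes below are stand-ins, not Odoo models; they only mimic the `filtered()` pattern.

```python
# Toy illustration of the guarded-iteration pattern used in the fix above.
class FakeOrder(object):
    def __init__(self, name, project_id):
        self.name = name
        self.project_id = project_id


class FakeRecordset(list):
    def filtered(self, predicate):
        return FakeRecordset(rec for rec in self if predicate(rec))


orders = FakeRecordset([
    FakeOrder("SO001", project_id=42),     # service with an analytic project
    FakeOrder("SO002", project_id=False),  # nonrecurring service, no project
])

for sale in orders.filtered(lambda order: order.project_id):
    print("cleaning event tickets for", sale.name)  # only SO001 is processed
```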
6f13946610745e348816e156c1c575d3ccd7ef8c
event_registration_analytic/models/sale_order.py
event_registration_analytic/models/sale_order.py
from openerp import api, models class SaleOrder(models.Model): _inherit = 'sale.order' @api.multi def action_button_confirm(self): project_obj = self.env['project.project'] event_obj = self.env['event.event'] res = super(SaleOrder, self).action_button_confirm() cond = [('analytic_account_id', '=', self.project_id.id)] project = project_obj.search(cond, limit=1) cond = [('project_id', '=', project.id)] events = event_obj.search(cond) for event in events: tickets = event.event_ticket_ids.filtered( lambda x: x.product_id.id == self.env.ref('event_sale.product_product_event').id) tickets.unlink() return res
from openerp import api, models class SaleOrder(models.Model): _inherit = 'sale.order' @api.multi def action_button_confirm(self): project_obj = self.env['project.project'] event_obj = self.env['event.event'] res = super(SaleOrder, self).action_button_confirm() for sale in self.filtered(lambda x: x.project_id): cond = [('analytic_account_id', '=', sale.project_id.id)] project = project_obj.search(cond, limit=1) cond = [('project_id', '=', project.id)] events = event_obj.search(cond) for event in events: tickets = event.event_ticket_ids.filtered( lambda x: x.product_id.id == self.env.ref('event_sale.product_product_event').id) tickets.unlink() return res
Fix bug when in sales order lines there is a nonrecurring service.
[FIX] event_registration_analytic: Fix bug when in sales order lines there is a nonrecurring service.
Python
agpl-3.0
avanzosc/event-wip
python
## Code Before: from openerp import api, models class SaleOrder(models.Model): _inherit = 'sale.order' @api.multi def action_button_confirm(self): project_obj = self.env['project.project'] event_obj = self.env['event.event'] res = super(SaleOrder, self).action_button_confirm() cond = [('analytic_account_id', '=', self.project_id.id)] project = project_obj.search(cond, limit=1) cond = [('project_id', '=', project.id)] events = event_obj.search(cond) for event in events: tickets = event.event_ticket_ids.filtered( lambda x: x.product_id.id == self.env.ref('event_sale.product_product_event').id) tickets.unlink() return res ## Instruction: [FIX] event_registration_analytic: Fix bug when in sales order lines there is a nonrecurring service. ## Code After: from openerp import api, models class SaleOrder(models.Model): _inherit = 'sale.order' @api.multi def action_button_confirm(self): project_obj = self.env['project.project'] event_obj = self.env['event.event'] res = super(SaleOrder, self).action_button_confirm() for sale in self.filtered(lambda x: x.project_id): cond = [('analytic_account_id', '=', sale.project_id.id)] project = project_obj.search(cond, limit=1) cond = [('project_id', '=', project.id)] events = event_obj.search(cond) for event in events: tickets = event.event_ticket_ids.filtered( lambda x: x.product_id.id == self.env.ref('event_sale.product_product_event').id) tickets.unlink() return res
// ... existing code ... project_obj = self.env['project.project'] event_obj = self.env['event.event'] res = super(SaleOrder, self).action_button_confirm() for sale in self.filtered(lambda x: x.project_id): cond = [('analytic_account_id', '=', sale.project_id.id)] project = project_obj.search(cond, limit=1) cond = [('project_id', '=', project.id)] events = event_obj.search(cond) for event in events: tickets = event.event_ticket_ids.filtered( lambda x: x.product_id.id == self.env.ref('event_sale.product_product_event').id) tickets.unlink() return res // ... rest of the code ...
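The record patches `client.createResolver` with `helpers.TestResolver()` via trial's `TestCase.patch`, which restores the original attribute when the test finishes, so `makeService` never touches real DNS. The helper itself is not shown in the record; the class below is only a guess at what such a stub might look like, not the actual `helpers.TestResolver`.

```python
# Hypothetical stand-in resolver: answers every lookup locally, no network.
from twisted.internet import defer


class FakeResolver(object):
    """Stub resolver that resolves every hostname to 127.0.0.1."""

    def getHostByName(self, name, timeout=None):
        return defer.succeed('127.0.0.1')
```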
893e52b16ea7998db1418dab8a10467a1f891289
forms.py
forms.py
from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')], default='week') limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
from flask_wtf import FlaskForm from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(FlaskForm): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(FlaskForm): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')], default='week') limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
Migrate from Form to FlaskForm
Migrate from Form to FlaskForm
Python
mit
JamieMagee/reddit2kindle,JamieMagee/reddit2kindle
python
## Code Before: from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')], default='week') limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) ## Instruction: Migrate from Form to FlaskForm ## Code After: from flask_wtf import FlaskForm from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(FlaskForm): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(FlaskForm): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')], default='week') limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
# ... existing code ... from flask_wtf import FlaskForm from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField # ... modified code ... csrf = CsrfProtect() class Submission(FlaskForm): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) ... kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(FlaskForm): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) # ... rest of the code ...
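Flask-WTF renamed its `Form` base class to `FlaskForm` and deprecated the old name, and usage is unchanged after the migration, e.g. `validate_on_submit()` still works on the updated classes. Below is a hedged sketch of wiring the `Submission` form into a view; the app, route, secret key and template are illustrative, not from the record.

```python
# Illustrative Flask view using the migrated Submission form.
from flask import Flask, render_template

from forms import Submission  # forms.py at the repository root, as above

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # needed for the CSRF token


@app.route('/submission', methods=['GET', 'POST'])
def submission_view():
    form = Submission()
    if form.validate_on_submit():  # provided by FlaskForm
        return 'queued {0}'.format(form.submission.data)
    return render_template('submission.html', form=form)
```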
6dcf7e509ee4f5b8b01592728fbbb1a94886a92a
src/main/java/io/github/lexware/bukkit/enderbow/EnderBowListener.java
src/main/java/io/github/lexware/bukkit/enderbow/EnderBowListener.java
package io.github.lexware.bukkit.enderbow; import org.bukkit.entity.Entity; import org.bukkit.entity.Projectile; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.entity.EntityShootBowEvent; import org.bukkit.event.entity.ProjectileHitEvent; import org.bukkit.metadata.FixedMetadataValue; import org.bukkit.metadata.MetadataValue; /** * Created by jamie on 09/01/15. */ public class EnderBowListener implements Listener { private final EnderBowPlugin plugin; public EnderBowListener(EnderBowPlugin plugin) { this.plugin = plugin; } @EventHandler public void onEntityShootBowEvent(EntityShootBowEvent event) { if(event.getBow().getItemMeta().getDisplayName().equals("Ender bow")) { event.getProjectile().setMetadata("enderBowData", new FixedMetadataValue(plugin, "enderArrow")); } } @EventHandler public void onProjectileHit(ProjectileHitEvent event) { if(event.getEntity().hasMetadata("enderBowData")) { for(MetadataValue value : event.getEntity().getMetadata("enderBowData")) { if(value.asString().equals("enderArrow")) { ((Entity)event.getEntity().getShooter()).teleport(event.getEntity().getLocation()); } } } } }
package io.github.lexware.bukkit.enderbow; import org.bukkit.entity.Entity; import org.bukkit.entity.Projectile; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.entity.EntityShootBowEvent; import org.bukkit.event.entity.ProjectileHitEvent; import org.bukkit.metadata.FixedMetadataValue; import org.bukkit.metadata.MetadataValue; /** * Created by jamie on 09/01/15. */ public class EnderBowListener implements Listener { private final EnderBowPlugin plugin; public EnderBowListener(EnderBowPlugin plugin) { this.plugin = plugin; } @EventHandler public void onEntityShootBowEvent(EntityShootBowEvent event) { if(event.getBow().hasItemMeta() && event.getBow().getItemMeta().getDisplayName().equals("Ender bow")) { event.getProjectile().setMetadata("enderBowData", new FixedMetadataValue(plugin, "enderArrow")); } } @EventHandler public void onProjectileHit(ProjectileHitEvent event) { if(event.getEntity().hasMetadata("enderBowData")) { for(MetadataValue value : event.getEntity().getMetadata("enderBowData")) { if(value.asString().equals("enderArrow")) { ((Entity)event.getEntity().getShooter()).teleport(event.getEntity().getLocation()); } } } } }
Fix NPE with normal bows.
Fix NPE with normal bows.
Java
mit
jamierocks/EnderBow
java
## Code Before: package io.github.lexware.bukkit.enderbow; import org.bukkit.entity.Entity; import org.bukkit.entity.Projectile; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.entity.EntityShootBowEvent; import org.bukkit.event.entity.ProjectileHitEvent; import org.bukkit.metadata.FixedMetadataValue; import org.bukkit.metadata.MetadataValue; /** * Created by jamie on 09/01/15. */ public class EnderBowListener implements Listener { private final EnderBowPlugin plugin; public EnderBowListener(EnderBowPlugin plugin) { this.plugin = plugin; } @EventHandler public void onEntityShootBowEvent(EntityShootBowEvent event) { if(event.getBow().getItemMeta().getDisplayName().equals("Ender bow")) { event.getProjectile().setMetadata("enderBowData", new FixedMetadataValue(plugin, "enderArrow")); } } @EventHandler public void onProjectileHit(ProjectileHitEvent event) { if(event.getEntity().hasMetadata("enderBowData")) { for(MetadataValue value : event.getEntity().getMetadata("enderBowData")) { if(value.asString().equals("enderArrow")) { ((Entity)event.getEntity().getShooter()).teleport(event.getEntity().getLocation()); } } } } } ## Instruction: Fix NPE with normal bows. ## Code After: package io.github.lexware.bukkit.enderbow; import org.bukkit.entity.Entity; import org.bukkit.entity.Projectile; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.entity.EntityShootBowEvent; import org.bukkit.event.entity.ProjectileHitEvent; import org.bukkit.metadata.FixedMetadataValue; import org.bukkit.metadata.MetadataValue; /** * Created by jamie on 09/01/15. */ public class EnderBowListener implements Listener { private final EnderBowPlugin plugin; public EnderBowListener(EnderBowPlugin plugin) { this.plugin = plugin; } @EventHandler public void onEntityShootBowEvent(EntityShootBowEvent event) { if(event.getBow().hasItemMeta() && event.getBow().getItemMeta().getDisplayName().equals("Ender bow")) { event.getProjectile().setMetadata("enderBowData", new FixedMetadataValue(plugin, "enderArrow")); } } @EventHandler public void onProjectileHit(ProjectileHitEvent event) { if(event.getEntity().hasMetadata("enderBowData")) { for(MetadataValue value : event.getEntity().getMetadata("enderBowData")) { if(value.asString().equals("enderArrow")) { ((Entity)event.getEntity().getShooter()).teleport(event.getEntity().getLocation()); } } } } }
# ... existing code ... @EventHandler public void onEntityShootBowEvent(EntityShootBowEvent event) { if(event.getBow().hasItemMeta() && event.getBow().getItemMeta().getDisplayName().equals("Ender bow")) { event.getProjectile().setMetadata("enderBowData", new FixedMetadataValue(plugin, "enderArrow")); } } # ... rest of the code ...
4a601336ee5fccfe2cb4ebf50bd7fdfe127f3a61
setup.py
setup.py
from setuptools import setup import io import os here = os.path.abspath(os.path.dirname(__file__)) def read(*filenames, **kwargs): encoding = kwargs.get('encoding', 'utf-8') sep = kwargs.get('sep', '\n') buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: buf.append(f.read()) return sep.join(buf) long_description = read('README.md') setup( name='mongo-pool', version='0.4.2', url='http://github.com/ubervu/mongo-pool/', description='The tool that keeps all your mongos in one place', long_description=long_description, license='Apache Software License', author='UberVU', author_email="[email protected]", install_requires=['pymongo>=3.0.3'], packages=['mongo_pool'], include_package_data=True, platforms='any', test_suite='nose.collector', tests_require=['nose', 'mock'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ], extras_require={ 'testing': ['nose'], } )
from setuptools import setup import io import os here = os.path.abspath(os.path.dirname(__file__)) def read(*filenames, **kwargs): encoding = kwargs.get('encoding', 'utf-8') sep = kwargs.get('sep', '\n') buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: buf.append(f.read()) return sep.join(buf) long_description = read('README.md') setup( name='mongo-pool', version='0.5.0', url='http://github.com/ubervu/mongo-pool/', description='The tool that keeps all your mongos in one place', long_description=long_description, license='Apache Software License', author='UberVU', author_email="[email protected]", install_requires=['pymongo>=3.6.1', 'six>=1.15.0'], packages=['mongo_pool'], include_package_data=True, platforms='any', test_suite='nose.collector', tests_require=['nose', 'mock'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ], extras_require={ 'testing': ['nose', 'mock'], } )
Update deps and bump version. ANL-10319
Update deps and bump version. ANL-10319
Python
apache-2.0
uberVU/mongo-pool,uberVU/mongo-pool
python
## Code Before: from setuptools import setup import io import os here = os.path.abspath(os.path.dirname(__file__)) def read(*filenames, **kwargs): encoding = kwargs.get('encoding', 'utf-8') sep = kwargs.get('sep', '\n') buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: buf.append(f.read()) return sep.join(buf) long_description = read('README.md') setup( name='mongo-pool', version='0.4.2', url='http://github.com/ubervu/mongo-pool/', description='The tool that keeps all your mongos in one place', long_description=long_description, license='Apache Software License', author='UberVU', author_email="[email protected]", install_requires=['pymongo>=3.0.3'], packages=['mongo_pool'], include_package_data=True, platforms='any', test_suite='nose.collector', tests_require=['nose', 'mock'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ], extras_require={ 'testing': ['nose'], } ) ## Instruction: Update deps and bump version. ANL-10319 ## Code After: from setuptools import setup import io import os here = os.path.abspath(os.path.dirname(__file__)) def read(*filenames, **kwargs): encoding = kwargs.get('encoding', 'utf-8') sep = kwargs.get('sep', '\n') buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: buf.append(f.read()) return sep.join(buf) long_description = read('README.md') setup( name='mongo-pool', version='0.5.0', url='http://github.com/ubervu/mongo-pool/', description='The tool that keeps all your mongos in one place', long_description=long_description, license='Apache Software License', author='UberVU', author_email="[email protected]", install_requires=['pymongo>=3.6.1', 'six>=1.15.0'], packages=['mongo_pool'], include_package_data=True, platforms='any', test_suite='nose.collector', tests_require=['nose', 'mock'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ], extras_require={ 'testing': ['nose', 'mock'], } )
// ... existing code ... setup( name='mongo-pool', version='0.5.0', url='http://github.com/ubervu/mongo-pool/', description='The tool that keeps all your mongos in one place', long_description=long_description, // ... modified code ... license='Apache Software License', author='UberVU', author_email="[email protected]", install_requires=['pymongo>=3.6.1', 'six>=1.15.0'], packages=['mongo_pool'], include_package_data=True, platforms='any', ... 'Topic :: Software Development :: Libraries :: Python Modules', ], extras_require={ 'testing': ['nose', 'mock'], } ) // ... rest of the code ...
29936557b14fdf160758f98473e8fd1fed2c91f0
src/main/java/io/cozmic/usher/pipeline/JuelMatcher.java
src/main/java/io/cozmic/usher/pipeline/JuelMatcher.java
package io.cozmic.usher.pipeline; import de.odysseus.el.util.SimpleContext; import io.cozmic.usher.core.MessageMatcher; import io.cozmic.usher.message.PipelinePack; import javax.el.ExpressionFactory; import javax.el.ValueExpression; /** * Created by chuck on 7/3/15. */ public class JuelMatcher implements MessageMatcher { private static ExpressionFactory factory = ExpressionFactory.newInstance(); private SimpleContext context = new SimpleContext(); private ValueExpression expression; public JuelMatcher(String expressionVal) { expression = factory.createValueExpression(context, expressionVal, boolean.class); } @Override public boolean matches(PipelinePack pipelinePack) { SimpleContext runtimeContext = new SimpleContext(); final Object msg = pipelinePack.getMessage(); factory.createValueExpression(runtimeContext, "${msgClassSimpleName}", String.class).setValue(runtimeContext, msg.getClass().getSimpleName()); factory.createValueExpression(runtimeContext, "${msg}", msg.getClass()).setValue(runtimeContext, msg); return (boolean) expression.getValue(runtimeContext); } }
package io.cozmic.usher.pipeline; import de.odysseus.el.util.SimpleContext; import io.cozmic.usher.core.MessageMatcher; import io.cozmic.usher.message.PipelinePack; import javax.el.ExpressionFactory; import javax.el.ValueExpression; /** * Created by chuck on 7/3/15. */ public class JuelMatcher implements MessageMatcher { private static ExpressionFactory factory = ExpressionFactory.newInstance(); private SimpleContext context = new SimpleContext(); private ValueExpression expression; public JuelMatcher(String expressionVal) { expression = factory.createValueExpression(context, expressionVal, boolean.class); } @Override public boolean matches(PipelinePack pipelinePack) { SimpleContext runtimeContext = new SimpleContext(); final Object msg = pipelinePack.getMessage(); if (msg != null) { factory.createValueExpression(runtimeContext, "${msgClassSimpleName}", String.class).setValue(runtimeContext, msg.getClass().getSimpleName()); factory.createValueExpression(runtimeContext, "${msg}", msg.getClass()).setValue(runtimeContext, msg); } return (boolean) expression.getValue(runtimeContext); } }
Fix messageMatcher in situations where message is null
Fix messageMatcher in situations where message is null
Java
apache-2.0
wired-mind/usher,wired-mind/usher
java
## Code Before: package io.cozmic.usher.pipeline; import de.odysseus.el.util.SimpleContext; import io.cozmic.usher.core.MessageMatcher; import io.cozmic.usher.message.PipelinePack; import javax.el.ExpressionFactory; import javax.el.ValueExpression; /** * Created by chuck on 7/3/15. */ public class JuelMatcher implements MessageMatcher { private static ExpressionFactory factory = ExpressionFactory.newInstance(); private SimpleContext context = new SimpleContext(); private ValueExpression expression; public JuelMatcher(String expressionVal) { expression = factory.createValueExpression(context, expressionVal, boolean.class); } @Override public boolean matches(PipelinePack pipelinePack) { SimpleContext runtimeContext = new SimpleContext(); final Object msg = pipelinePack.getMessage(); factory.createValueExpression(runtimeContext, "${msgClassSimpleName}", String.class).setValue(runtimeContext, msg.getClass().getSimpleName()); factory.createValueExpression(runtimeContext, "${msg}", msg.getClass()).setValue(runtimeContext, msg); return (boolean) expression.getValue(runtimeContext); } } ## Instruction: Fix messageMatcher in situations where message is null ## Code After: package io.cozmic.usher.pipeline; import de.odysseus.el.util.SimpleContext; import io.cozmic.usher.core.MessageMatcher; import io.cozmic.usher.message.PipelinePack; import javax.el.ExpressionFactory; import javax.el.ValueExpression; /** * Created by chuck on 7/3/15. */ public class JuelMatcher implements MessageMatcher { private static ExpressionFactory factory = ExpressionFactory.newInstance(); private SimpleContext context = new SimpleContext(); private ValueExpression expression; public JuelMatcher(String expressionVal) { expression = factory.createValueExpression(context, expressionVal, boolean.class); } @Override public boolean matches(PipelinePack pipelinePack) { SimpleContext runtimeContext = new SimpleContext(); final Object msg = pipelinePack.getMessage(); if (msg != null) { factory.createValueExpression(runtimeContext, "${msgClassSimpleName}", String.class).setValue(runtimeContext, msg.getClass().getSimpleName()); factory.createValueExpression(runtimeContext, "${msg}", msg.getClass()).setValue(runtimeContext, msg); } return (boolean) expression.getValue(runtimeContext); } }
# ... existing code ... public boolean matches(PipelinePack pipelinePack) { SimpleContext runtimeContext = new SimpleContext(); final Object msg = pipelinePack.getMessage(); if (msg != null) { factory.createValueExpression(runtimeContext, "${msgClassSimpleName}", String.class).setValue(runtimeContext, msg.getClass().getSimpleName()); factory.createValueExpression(runtimeContext, "${msg}", msg.getClass()).setValue(runtimeContext, msg); } return (boolean) expression.getValue(runtimeContext); } } # ... rest of the code ...
9836c275d79851010654aacda379ccb78cea1b27
chartflo/engine.py
chartflo/engine.py
import pandas as pd from goerr import err from dataswim import DataSwim from django.db.models.query import QuerySet from django_pandas.io import read_frame class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" def load_data(self, dataset): """ Set the main dataframe with the input data """ try: df = self._load_data(dataset) self.df = df except Exception as e: err.new(e, self.load_data, "Can not load dataset") def load_data_(self, dataset): """ Returns an instance with the input data """ try: df = self._load_data(dataset) return self.clone_(df) except Exception as e: err.new(e, self._load_data, "Can not load dataset") def _load_data(self, dataset): """ Convert the input data to pandas dataframe """ df = pd.DataFrame() try: if isinstance(dataset, pd.DataFrame): return dataset elif isinstance(dataset, QuerySet): df = read_frame(dataset) elif isinstance(dataset, dict): df = self._dict_to_df(dataset) elif isinstance(dataset, list): return pd.DataFrame(dataset) else: err.new(self._load_data, "Data format unknown: " + str(type(dataset)) + " please provide a dictionnary, a Django Queryset or a Pandas DataFrame") except Exception as e: err.new(e, self._load_data, "Can not convert dataset") if err.exists: err.throw() return df def _dict_to_df(self, dictobj): """ Converts a dictionary to a pandas dataframe """ x = [] y = [] print("DICT") for datapoint in dictobj: x.append(datapoint) y.append(dictobj[datapoint]) df = pd.DataFrame(dictobj) return df cf = ChartFlo()
from dataswim import DataSwim class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" cf = ChartFlo()
Move the load_data method to the Dataswim lib
Move the load_data method to the Dataswim lib
Python
mit
synw/django-chartflo,synw/django-chartflo,synw/django-chartflo
python
## Code Before: import pandas as pd from goerr import err from dataswim import DataSwim from django.db.models.query import QuerySet from django_pandas.io import read_frame class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" def load_data(self, dataset): """ Set the main dataframe with the input data """ try: df = self._load_data(dataset) self.df = df except Exception as e: err.new(e, self.load_data, "Can not load dataset") def load_data_(self, dataset): """ Returns an instance with the input data """ try: df = self._load_data(dataset) return self.clone_(df) except Exception as e: err.new(e, self._load_data, "Can not load dataset") def _load_data(self, dataset): """ Convert the input data to pandas dataframe """ df = pd.DataFrame() try: if isinstance(dataset, pd.DataFrame): return dataset elif isinstance(dataset, QuerySet): df = read_frame(dataset) elif isinstance(dataset, dict): df = self._dict_to_df(dataset) elif isinstance(dataset, list): return pd.DataFrame(dataset) else: err.new(self._load_data, "Data format unknown: " + str(type(dataset)) + " please provide a dictionnary, a Django Queryset or a Pandas DataFrame") except Exception as e: err.new(e, self._load_data, "Can not convert dataset") if err.exists: err.throw() return df def _dict_to_df(self, dictobj): """ Converts a dictionary to a pandas dataframe """ x = [] y = [] print("DICT") for datapoint in dictobj: x.append(datapoint) y.append(dictobj[datapoint]) df = pd.DataFrame(dictobj) return df cf = ChartFlo() ## Instruction: Move the load_data method to the Dataswim lib ## Code After: from dataswim import DataSwim class ChartFlo(DataSwim): def __repr__(self): """ String representation of the object """ rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" cf = ChartFlo()
... from dataswim import DataSwim class ChartFlo(DataSwim): ... rows = str(len(self.df.columns)) return '<Chartflo object - ' + rows + " >" cf = ChartFlo() ...
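A quick, hedged check of the slimmed-down class: `__repr__` reports `len(self.df.columns)` (despite the local variable being called `rows`), so attaching a two-column dataframe yields `<Chartflo object - 2 >`. Data-loading helpers now come from the DataSwim base class per the commit message; here the dataframe is assigned directly, assuming `dataswim` and `pandas` are installed and the `chartflo` app is importable.

```python
import pandas as pd

from chartflo.engine import cf  # module path taken from the record

cf.df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]})
print(repr(cf))  # -> <Chartflo object - 2 >
```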
73d218160c386019ea849719d36212c06d3962d2
core/alsp_src/win32/mswin32_config.h
core/alsp_src/win32/mswin32_config.h
/* * mswin32_config.h Hand made MSWin32 configuration file. * Copyright (c) 1996 Applied Logic Systems, Inc. * * Author: Chuck Houpt * Creation: 1/30/96 */ #include "dfltsys.h" #define MSWin32 1 #define OSStr "mswin32" #ifdef __GNUC__ #define EXTERNAL_STATE 1 #endif /* Temp. disable threading until threading GUI stub is fixed */ #ifdef __GNUC__ #define Bytecode 1 #endif #define HAVE_STDARG_H 1 #define HAVE_STDLIB_H 1 #define HAVE_FCNTL_H 1 #define HAVE_STRING_H 1 #define HAVE_SRAND 1 #define HAVE_TIME 1 #define HAVE_SOCKET 1 #define BERKELEY_SOCKETS 1 #define HAVE_SELECT 1 #define MISSING_UNIX_DOMAIN_SOCKETS 1 #define APP_PRINTF_CALLBACK 1 #define HAVE_STRCSPN 1 #define HAVE_STRSPN 1 #define HAVE_STRTOK 1 #define REVERSE_ENDIAN 1 /* The windows headers in Cygwin 1.3.4 are missing some prototypes, so define them here to silence the waring messages. */ #ifdef __GNUC__ extern __inline__ void* GetCurrentFiber(void); extern __inline__ void* GetFiberData(void); #endif #include <winsock2.h> #include <windows.h>
/* * mswin32_config.h Hand made MSWin32 configuration file. * Copyright (c) 1996 Applied Logic Systems, Inc. * * Author: Chuck Houpt * Creation: 1/30/96 */ #include "dfltsys.h" #define MSWin32 1 #define OSStr "mswin32" #ifdef __GNUC__ #define EXTERNAL_STATE 1 #endif /* Temp. disable threading until threading GUI stub is fixed */ #ifdef __GNUC__ #define Bytecode 1 #endif #define HAVE_STDARG_H 1 #define HAVE_STDLIB_H 1 #define HAVE_FCNTL_H 1 #define HAVE_STRING_H 1 #define HAVE_SRAND 1 #define HAVE_TIME 1 #define HAVE_SOCKET 1 #define BERKELEY_SOCKETS 1 #define HAVE_SELECT 1 #define MISSING_UNIX_DOMAIN_SOCKETS 1 #define APP_PRINTF_CALLBACK 1 #define HAVE_STRCSPN 1 #define HAVE_STRSPN 1 #define HAVE_STRTOK 1 #define REVERSE_ENDIAN 1 #include <winsock2.h> #include <windows.h>
Remove defunct missing defs that interfere with CI build
Remove defunct missing defs that interfere with CI build
C
mit
AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog
c
## Code Before: /* * mswin32_config.h Hand made MSWin32 configuration file. * Copyright (c) 1996 Applied Logic Systems, Inc. * * Author: Chuck Houpt * Creation: 1/30/96 */ #include "dfltsys.h" #define MSWin32 1 #define OSStr "mswin32" #ifdef __GNUC__ #define EXTERNAL_STATE 1 #endif /* Temp. disable threading until threading GUI stub is fixed */ #ifdef __GNUC__ #define Bytecode 1 #endif #define HAVE_STDARG_H 1 #define HAVE_STDLIB_H 1 #define HAVE_FCNTL_H 1 #define HAVE_STRING_H 1 #define HAVE_SRAND 1 #define HAVE_TIME 1 #define HAVE_SOCKET 1 #define BERKELEY_SOCKETS 1 #define HAVE_SELECT 1 #define MISSING_UNIX_DOMAIN_SOCKETS 1 #define APP_PRINTF_CALLBACK 1 #define HAVE_STRCSPN 1 #define HAVE_STRSPN 1 #define HAVE_STRTOK 1 #define REVERSE_ENDIAN 1 /* The windows headers in Cygwin 1.3.4 are missing some prototypes, so define them here to silence the waring messages. */ #ifdef __GNUC__ extern __inline__ void* GetCurrentFiber(void); extern __inline__ void* GetFiberData(void); #endif #include <winsock2.h> #include <windows.h> ## Instruction: Remove defunct missing defs that interfere with CI build ## Code After: /* * mswin32_config.h Hand made MSWin32 configuration file. * Copyright (c) 1996 Applied Logic Systems, Inc. * * Author: Chuck Houpt * Creation: 1/30/96 */ #include "dfltsys.h" #define MSWin32 1 #define OSStr "mswin32" #ifdef __GNUC__ #define EXTERNAL_STATE 1 #endif /* Temp. disable threading until threading GUI stub is fixed */ #ifdef __GNUC__ #define Bytecode 1 #endif #define HAVE_STDARG_H 1 #define HAVE_STDLIB_H 1 #define HAVE_FCNTL_H 1 #define HAVE_STRING_H 1 #define HAVE_SRAND 1 #define HAVE_TIME 1 #define HAVE_SOCKET 1 #define BERKELEY_SOCKETS 1 #define HAVE_SELECT 1 #define MISSING_UNIX_DOMAIN_SOCKETS 1 #define APP_PRINTF_CALLBACK 1 #define HAVE_STRCSPN 1 #define HAVE_STRSPN 1 #define HAVE_STRTOK 1 #define REVERSE_ENDIAN 1 #include <winsock2.h> #include <windows.h>
// ... existing code ... #define REVERSE_ENDIAN 1 #include <winsock2.h> #include <windows.h> // ... rest of the code ...
2512ff39651d55c2c17ad1b4e05a0fc5d0bee415
indra/tests/test_chebi_client.py
indra/tests/test_chebi_client.py
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.databases import chebi_client from indra.util import unicode_strs from nose.plugins.attrib import attr def test_read_chebi_to_pubchem(): (ctop, ptoc) = chebi_client._read_chebi_to_pubchem() assert ctop['85673'] == '252150010' assert ptoc['252150010'] == '85673' assert unicode_strs((ctop, ptoc)) def test_read_chebi_to_chembl(): ctoc = chebi_client._read_chebi_to_chembl() assert ctoc['50729'] == 'CHEMBL58' assert unicode_strs(ctoc) def test_cas_to_chebi(): assert chebi_client.get_chebi_id_from_cas('23261-20-3') == '18035' assert chebi_client.get_chebi_id_from_cas('100-51-6') == '17987' assert chebi_client.get_chebi_id_from_cas('-1') is None
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.databases import chebi_client from indra.util import unicode_strs from nose.plugins.attrib import attr def test_read_chebi_to_pubchem(): (ctop, ptoc) = chebi_client._read_chebi_to_pubchem() assert ctop['85673'] == '91481662' assert ptoc['91481662'] == '85673' assert unicode_strs((ctop, ptoc)) def test_read_chebi_to_chembl(): ctoc = chebi_client._read_chebi_to_chembl() assert ctoc['50729'] == 'CHEMBL58' assert unicode_strs(ctoc) def test_cas_to_chebi(): assert chebi_client.get_chebi_id_from_cas('23261-20-3') == '18035' assert chebi_client.get_chebi_id_from_cas('100-51-6') == '17987' assert chebi_client.get_chebi_id_from_cas('-1') is None
Fix ChEBI to Pubchem mapping test
Fix ChEBI to Pubchem mapping test
Python
bsd-2-clause
johnbachman/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,pvtodorov/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/indra
python
## Code Before: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.databases import chebi_client from indra.util import unicode_strs from nose.plugins.attrib import attr def test_read_chebi_to_pubchem(): (ctop, ptoc) = chebi_client._read_chebi_to_pubchem() assert ctop['85673'] == '252150010' assert ptoc['252150010'] == '85673' assert unicode_strs((ctop, ptoc)) def test_read_chebi_to_chembl(): ctoc = chebi_client._read_chebi_to_chembl() assert ctoc['50729'] == 'CHEMBL58' assert unicode_strs(ctoc) def test_cas_to_chebi(): assert chebi_client.get_chebi_id_from_cas('23261-20-3') == '18035' assert chebi_client.get_chebi_id_from_cas('100-51-6') == '17987' assert chebi_client.get_chebi_id_from_cas('-1') is None ## Instruction: Fix ChEBI to Pubchem mapping test ## Code After: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.databases import chebi_client from indra.util import unicode_strs from nose.plugins.attrib import attr def test_read_chebi_to_pubchem(): (ctop, ptoc) = chebi_client._read_chebi_to_pubchem() assert ctop['85673'] == '91481662' assert ptoc['91481662'] == '85673' assert unicode_strs((ctop, ptoc)) def test_read_chebi_to_chembl(): ctoc = chebi_client._read_chebi_to_chembl() assert ctoc['50729'] == 'CHEMBL58' assert unicode_strs(ctoc) def test_cas_to_chebi(): assert chebi_client.get_chebi_id_from_cas('23261-20-3') == '18035' assert chebi_client.get_chebi_id_from_cas('100-51-6') == '17987' assert chebi_client.get_chebi_id_from_cas('-1') is None
# ... existing code ...
def test_read_chebi_to_pubchem():
    (ctop, ptoc) = chebi_client._read_chebi_to_pubchem()
    assert ctop['85673'] == '91481662'
    assert ptoc['91481662'] == '85673'
    assert unicode_strs((ctop, ptoc))
# ... rest of the code ...
9c926f325de84b4692d72ac67050bc6f66bbd47d
include/grpc/impl/codegen/fork.h
include/grpc/impl/codegen/fork.h
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef GRPC_IMPL_CODEGEN_FORK_H
#define GRPC_IMPL_CODEGEN_FORK_H

/**
 * gRPC applications should call this before calling fork(). There should be no
 * active gRPC function calls between calling grpc_prefork() and
 * grpc_postfork_parent()/grpc_postfork_child().
 *
 *
 * Typical use:
 * grpc_prefork();
 * int pid = fork();
 * if (pid) {
 *   grpc_postfork_parent();
 *   // Parent process..
 * } else {
 *   grpc_postfork_child();
 *   // Child process...
 * }
 */
void grpc_prefork();
void grpc_postfork_parent();
void grpc_postfork_child();
void grpc_fork_handlers_auto_register();

#endif /* GRPC_IMPL_CODEGEN_FORK_H */
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef GRPC_IMPL_CODEGEN_FORK_H
#define GRPC_IMPL_CODEGEN_FORK_H

/**
 * gRPC applications should call this before calling fork(). There should be no
 * active gRPC function calls between calling grpc_prefork() and
 * grpc_postfork_parent()/grpc_postfork_child().
 *
 *
 * Typical use:
 * grpc_prefork();
 * int pid = fork();
 * if (pid) {
 *   grpc_postfork_parent();
 *   // Parent process..
 * } else {
 *   grpc_postfork_child();
 *   // Child process...
 * }
 */
void grpc_prefork(void);
void grpc_postfork_parent(void);
void grpc_postfork_child(void);
void grpc_fork_handlers_auto_register(void);

#endif /* GRPC_IMPL_CODEGEN_FORK_H */
Resolve Swift warnings by specifying void arguments
Resolve Swift warnings by specifying void arguments

The following functions in the `fork.h` file cause a `This function declaration is not a prototype` warning in Swift:

```
void grpc_prefork(void);
void grpc_postfork_parent(void);
void grpc_postfork_child(void);
void grpc_fork_handlers_auto_register(void);
```

Explicitly specifying `void` as the argument resolves the warnings.

Reproducible using Xcode 9.2 with `SwiftGRPC`/`gRPC-Core` via CocoaPods.
C
apache-2.0
dgquintas/grpc,jtattermusch/grpc,ncteisen/grpc,firebase/grpc,jboeuf/grpc,jboeuf/grpc,nicolasnoble/grpc,thinkerou/grpc,donnadionne/grpc,grpc/grpc,nicolasnoble/grpc,pszemus/grpc,ncteisen/grpc,stanley-cheung/grpc,donnadionne/grpc,ctiller/grpc,donnadionne/grpc,ctiller/grpc,ncteisen/grpc,jboeuf/grpc,ejona86/grpc,mehrdada/grpc,Vizerai/grpc,vjpai/grpc,carl-mastrangelo/grpc,nicolasnoble/grpc,dgquintas/grpc,simonkuang/grpc,stanley-cheung/grpc,muxi/grpc,stanley-cheung/grpc,Vizerai/grpc,thinkerou/grpc,Vizerai/grpc,mehrdada/grpc,grpc/grpc,ncteisen/grpc,ctiller/grpc,chrisdunelm/grpc,ejona86/grpc,firebase/grpc,nicolasnoble/grpc,thinkerou/grpc,vjpai/grpc,thinkerou/grpc,jtattermusch/grpc,thinkerou/grpc,muxi/grpc,thinkerou/grpc,pszemus/grpc,thinkerou/grpc,sreecha/grpc,jtattermusch/grpc,carl-mastrangelo/grpc,sreecha/grpc,dgquintas/grpc,Vizerai/grpc,mehrdada/grpc,chrisdunelm/grpc,sreecha/grpc,ejona86/grpc,firebase/grpc,ctiller/grpc,nicolasnoble/grpc,donnadionne/grpc,carl-mastrangelo/grpc,firebase/grpc,jtattermusch/grpc,donnadionne/grpc,muxi/grpc,Vizerai/grpc,thinkerou/grpc,ctiller/grpc,stanley-cheung/grpc,ncteisen/grpc,vjpai/grpc,simonkuang/grpc,carl-mastrangelo/grpc,sreecha/grpc,pszemus/grpc,jtattermusch/grpc,muxi/grpc,ejona86/grpc,carl-mastrangelo/grpc,thinkerou/grpc,sreecha/grpc,vjpai/grpc,Vizerai/grpc,Vizerai/grpc,jboeuf/grpc,donnadionne/grpc,dgquintas/grpc,sreecha/grpc,nicolasnoble/grpc,nicolasnoble/grpc,donnadionne/grpc,chrisdunelm/grpc,simonkuang/grpc,dgquintas/grpc,chrisdunelm/grpc,pszemus/grpc,ctiller/grpc,mehrdada/grpc,chrisdunelm/grpc,jboeuf/grpc,jboeuf/grpc,stanley-cheung/grpc,ejona86/grpc,thinkerou/grpc,ncteisen/grpc,nicolasnoble/grpc,mehrdada/grpc,simonkuang/grpc,grpc/grpc,carl-mastrangelo/grpc,jboeuf/grpc,firebase/grpc,donnadionne/grpc,vjpai/grpc,grpc/grpc,sreecha/grpc,carl-mastrangelo/grpc,grpc/grpc,vjpai/grpc,nicolasnoble/grpc,vjpai/grpc,grpc/grpc,firebase/grpc,carl-mastrangelo/grpc,sreecha/grpc,muxi/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,chrisdunelm/grpc,mehrdada/grpc,jtattermusch/grpc,grpc/grpc,ctiller/grpc,nicolasnoble/grpc,firebase/grpc,donnadionne/grpc,carl-mastrangelo/grpc,mehrdada/grpc,nicolasnoble/grpc,firebase/grpc,ejona86/grpc,mehrdada/grpc,pszemus/grpc,ncteisen/grpc,muxi/grpc,stanley-cheung/grpc,ejona86/grpc,ejona86/grpc,stanley-cheung/grpc,dgquintas/grpc,muxi/grpc,chrisdunelm/grpc,ctiller/grpc,ctiller/grpc,sreecha/grpc,Vizerai/grpc,dgquintas/grpc,jtattermusch/grpc,stanley-cheung/grpc,grpc/grpc,thinkerou/grpc,ncteisen/grpc,jtattermusch/grpc,simonkuang/grpc,mehrdada/grpc,donnadionne/grpc,simonkuang/grpc,jboeuf/grpc,vjpai/grpc,vjpai/grpc,ncteisen/grpc,Vizerai/grpc,ejona86/grpc,chrisdunelm/grpc,simonkuang/grpc,vjpai/grpc,firebase/grpc,ejona86/grpc,pszemus/grpc,mehrdada/grpc,jboeuf/grpc,firebase/grpc,simonkuang/grpc,jboeuf/grpc,dgquintas/grpc,carl-mastrangelo/grpc,pszemus/grpc,pszemus/grpc,firebase/grpc,grpc/grpc,ncteisen/grpc,Vizerai/grpc,muxi/grpc,muxi/grpc,dgquintas/grpc,stanley-cheung/grpc,jtattermusch/grpc,sreecha/grpc,dgquintas/grpc,sreecha/grpc,pszemus/grpc,mehrdada/grpc,sreecha/grpc,muxi/grpc,ctiller/grpc,ncteisen/grpc,nicolasnoble/grpc,ctiller/grpc,grpc/grpc,chrisdunelm/grpc,stanley-cheung/grpc,ctiller/grpc,jtattermusch/grpc,donnadionne/grpc,ncteisen/grpc,pszemus/grpc,simonkuang/grpc,grpc/grpc,jboeuf/grpc,thinkerou/grpc,chrisdunelm/grpc,muxi/grpc,stanley-cheung/grpc,mehrdada/grpc,Vizerai/grpc,ejona86/grpc,donnadionne/grpc,chrisdunelm/grpc,vjpai/grpc,jtattermusch/grpc,vjpai/grpc,pszemus/grpc,dgquintas/grpc,jtattermusch/grpc,muxi/grpc,grpc/grpc,ejona86/grpc,jboeuf/grpc
,stanley-cheung/grpc,firebase/grpc,pszemus/grpc
c
## Code Before:
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef GRPC_IMPL_CODEGEN_FORK_H
#define GRPC_IMPL_CODEGEN_FORK_H

/**
 * gRPC applications should call this before calling fork(). There should be no
 * active gRPC function calls between calling grpc_prefork() and
 * grpc_postfork_parent()/grpc_postfork_child().
 *
 *
 * Typical use:
 * grpc_prefork();
 * int pid = fork();
 * if (pid) {
 *   grpc_postfork_parent();
 *   // Parent process..
 * } else {
 *   grpc_postfork_child();
 *   // Child process...
 * }
 */
void grpc_prefork();
void grpc_postfork_parent();
void grpc_postfork_child();
void grpc_fork_handlers_auto_register();

#endif /* GRPC_IMPL_CODEGEN_FORK_H */

## Instruction:
Resolve Swift warnings by specifying void arguments

The following functions in the `fork.h` file cause a `This function declaration is not a prototype` warning in Swift:

```
void grpc_prefork(void);
void grpc_postfork_parent(void);
void grpc_postfork_child(void);
void grpc_fork_handlers_auto_register(void);
```

Explicitly specifying `void` as the argument resolves the warnings.

Reproducible using Xcode 9.2 with `SwiftGRPC`/`gRPC-Core` via CocoaPods.

## Code After:
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef GRPC_IMPL_CODEGEN_FORK_H
#define GRPC_IMPL_CODEGEN_FORK_H

/**
 * gRPC applications should call this before calling fork(). There should be no
 * active gRPC function calls between calling grpc_prefork() and
 * grpc_postfork_parent()/grpc_postfork_child().
 *
 *
 * Typical use:
 * grpc_prefork();
 * int pid = fork();
 * if (pid) {
 *   grpc_postfork_parent();
 *   // Parent process..
 * } else {
 *   grpc_postfork_child();
 *   // Child process...
 * }
 */
void grpc_prefork(void);
void grpc_postfork_parent(void);
void grpc_postfork_child(void);
void grpc_fork_handlers_auto_register(void);

#endif /* GRPC_IMPL_CODEGEN_FORK_H */
...
 * }
 */
void grpc_prefork(void);
void grpc_postfork_parent(void);
void grpc_postfork_child(void);
void grpc_fork_handlers_auto_register(void);

#endif /* GRPC_IMPL_CODEGEN_FORK_H */
...
2055caf822c9a52c01501585f58efd32b8ed2d1c
main.c
main.c
/* shuffle files in a directory by giving them random names,
   optionally tacking a global file extension to the end */

#include <stdio.h>
#include <stdlib.h>
#include <dirent.h>
#include <string.h>

char *extension = '\0';
DIR *dir;
struct dirent *fileInDir;
int fileCount = 0;

int main(int argc, char **argv){
    int exponentialchars = 1;

    if (argc < 2){
        fprintf(stderr, "usage: %s <directory> <optional extension>\n",
            argv[0]);
        exit(1);
    }

    if (argv[2] != NULL){
        extension = argv[2];
    }

    dir = opendir(argv[1]);
    if (dir != NULL){
        while ((fileInDir = readdir(dir)) != NULL){
            fileCount++;
        }
    } else {
        perror(argv[1]);
        exit(2);
    }

    while (26**exponentialchars < fileCount){
        exponentialchars++;
    }

    rewinddir(dir);
    while ((fileInDir = readdir(dir)) != NULL){
    }
}
/* shuffle files in a directory by giving them random names,
   optionally tacking a global file extension to the end */

#include <stdio.h>
#include <stdlib.h>
#include <dirent.h>
#include <string.h>

int main(int argc, char **argv){
    char *extension = '\0';
    DIR *dir = opendir(argv[1]);
    DIR *dir_nameclobber = opendir(argv[1]);
    struct dirent *fileInDir;
    struct dirent *fileInDir_nameclobber;
    int fileCount = 0;
    char *newName;

    if (argc < 2){
        fprintf(stderr, "usage: %s <directory> <optional extension>\n",
            argv[0]);
        exit(1);
    }

    if (argv[2] != NULL){
        extension = argv[2];
    }

    if (dir != NULL){
        while ((fileInDir = readdir(dir)) != NULL){
            newName = tempnam(argv[1], NULL);
            while ((fileInDir_nameclobber = readdir(dir_nameclobber)) != NULL){
            }
        }
    } else {
        perror(argv[1]);
        exit(2);
    }
}
Remove a bunch of the randomization stuff; the program now uses tempnam(3).
Remove a bunch of the randomization stuff; the program now uses tempnam(3).
C
mit
LordCreepity/dirshuf
c
## Code Before:
/* shuffle files in a directory by giving them random names,
   optionally tacking a global file extension to the end */

#include <stdio.h>
#include <stdlib.h>
#include <dirent.h>
#include <string.h>

char *extension = '\0';
DIR *dir;
struct dirent *fileInDir;
int fileCount = 0;

int main(int argc, char **argv){
    int exponentialchars = 1;

    if (argc < 2){
        fprintf(stderr, "usage: %s <directory> <optional extension>\n",
            argv[0]);
        exit(1);
    }

    if (argv[2] != NULL){
        extension = argv[2];
    }

    dir = opendir(argv[1]);
    if (dir != NULL){
        while ((fileInDir = readdir(dir)) != NULL){
            fileCount++;
        }
    } else {
        perror(argv[1]);
        exit(2);
    }

    while (26**exponentialchars < fileCount){
        exponentialchars++;
    }

    rewinddir(dir);
    while ((fileInDir = readdir(dir)) != NULL){
    }
}

## Instruction:
Remove a bunch of the randomization stuff; the program now uses tempnam(3).

## Code After:
/* shuffle files in a directory by giving them random names,
   optionally tacking a global file extension to the end */

#include <stdio.h>
#include <stdlib.h>
#include <dirent.h>
#include <string.h>

int main(int argc, char **argv){
    char *extension = '\0';
    DIR *dir = opendir(argv[1]);
    DIR *dir_nameclobber = opendir(argv[1]);
    struct dirent *fileInDir;
    struct dirent *fileInDir_nameclobber;
    int fileCount = 0;
    char *newName;

    if (argc < 2){
        fprintf(stderr, "usage: %s <directory> <optional extension>\n",
            argv[0]);
        exit(1);
    }

    if (argv[2] != NULL){
        extension = argv[2];
    }

    if (dir != NULL){
        while ((fileInDir = readdir(dir)) != NULL){
            newName = tempnam(argv[1], NULL);
            while ((fileInDir_nameclobber = readdir(dir_nameclobber)) != NULL){
            }
        }
    } else {
        perror(argv[1]);
        exit(2);
    }
}
...
#include <dirent.h>
#include <string.h>

int main(int argc, char **argv){
    char *extension = '\0';
    DIR *dir = opendir(argv[1]);
    DIR *dir_nameclobber = opendir(argv[1]);
    struct dirent *fileInDir;
    struct dirent *fileInDir_nameclobber;
    int fileCount = 0;
    char *newName;

    if (argc < 2){
        fprintf(stderr, "usage: %s <directory> <optional extension>\n",
...
    if (argv[2] != NULL){
        extension = argv[2];
    }

    if (dir != NULL){
        while ((fileInDir = readdir(dir)) != NULL){
            newName = tempnam(argv[1], NULL);
            while ((fileInDir_nameclobber = readdir(dir_nameclobber)) != NULL){
            }
        }
    } else {
        perror(argv[1]);
        exit(2);
    }
}
...