Dataset Viewer

| Column | Type | Lengths / values |
|---|---|---|
| repo | string | lengths 8-50 |
| commit | string | length 40 |
| path | string | lengths 5-171 |
| lang | string | 5 classes |
| license | string | 13 classes |
| message | string | lengths 21-1.33k |
| old_code | string | lengths 15-2.4k |
| new_code | string | lengths 140-2.61k |
| n_added | int64 | 0-81 |
| n_removed | int64 | 0-58 |
| n_hunks | int64 | 1-8 |
| change_kind | string | 3 classes |
| udiff | string | lengths 88-3.33k |
| udiff-h | string | lengths 85-3.32k |
| udiff-l | string | lengths 95-3.57k |
| search-replace | string | lengths 87-3.19k |
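Each row below follows the column order above. As a reading aid, here is a minimal sketch of the per-row record in Python; the class name and the underscore spellings of the hyphenated columns (udiff-h, udiff-l, search-replace) are illustrative, not part of the dataset itself.

```python
from dataclasses import dataclass

@dataclass
class CodeChangeRow:
    """One dataset row: a single file-level code change in several diff encodings."""
    repo: str            # GitHub repository, e.g. "agoda-com/Kakao"
    commit: str          # 40-character commit SHA
    path: str            # path of the changed file within the repository
    lang: str            # language class (kotlin, java, rust, python and javascript appear below)
    license: str         # license class, e.g. "apache-2.0" or "mit"
    message: str         # commit message describing the change
    old_code: str        # file content before the change
    new_code: str        # file content after the change
    n_added: int         # number of added lines
    n_removed: int       # number of removed lines
    n_hunks: int         # number of diff hunks
    change_kind: str     # "add_only", "del_only" or "mixed"
    udiff: str           # unified diff with numeric @@ hunk headers
    udiff_h: str         # unified diff with elided "@@ ... @@" hunk headers
    udiff_l: str         # line-tagged diff (CON/ADD/DEL prefixes)
    search_replace: str  # SEARCH/REPLACE edit blocks
```

The four diff columns encode the same change for each row; they differ only in how the edit is serialized.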
repo: agoda-com/Kakao | commit: c31ae715cbba04b6d8a366b53cd101c0bad92159
path: sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
lang: kotlin | license: apache-2.0
message: Add test for picker dialogs
old_code:
package com.agoda.sample
new_code:
package com.agoda.sample
import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
import androidx.test.rule.ActivityTestRule
import com.agoda.kakao.screen.Screen
import com.agoda.sample.screen.PickersActivityScreen
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith
@RunWith(AndroidJUnit4ClassRunner::class)
class PickersTest {
@Rule
@JvmField
val rule = ActivityTestRule(PickersActivity::class.java)
@Test
fun testTextInputLayout() {
Screen.onScreen<PickersActivityScreen> {
selectTimeButton {
click()
}
timePicker {
setTime(22, 4)
}
timeText {
hasText("22:4")
}
selectDateButton {
click()
}
datePicker {
setDate(1955, 11, 12)
}
dateText {
hasText("12 11 1955")
}
}
}
}
n_added: 43 | n_removed: 0 | n_hunks: 1 | change_kind: add_only
udiff:
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
+++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
@@ -2 +2,44 @@
+import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
+import androidx.test.rule.ActivityTestRule
+import com.agoda.kakao.screen.Screen
+import com.agoda.sample.screen.PickersActivityScreen
+import org.junit.Rule
+import org.junit.Test
+import org.junit.runner.RunWith
+
+@RunWith(AndroidJUnit4ClassRunner::class)
+class PickersTest {
+ @Rule
+ @JvmField
+ val rule = ActivityTestRule(PickersActivity::class.java)
+
+ @Test
+ fun testTextInputLayout() {
+ Screen.onScreen<PickersActivityScreen> {
+ selectTimeButton {
+ click()
+ }
+
+ timePicker {
+ setTime(22, 4)
+ }
+
+ timeText {
+ hasText("22:4")
+ }
+
+ selectDateButton {
+ click()
+ }
+
+ datePicker {
+ setDate(1955, 11, 12)
+ }
+
+ dateText {
+ hasText("12 11 1955")
+ }
+ }
+ }
+}
udiff-h:
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
+++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
@@ ... @@
+import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
+import androidx.test.rule.ActivityTestRule
+import com.agoda.kakao.screen.Screen
+import com.agoda.sample.screen.PickersActivityScreen
+import org.junit.Rule
+import org.junit.Test
+import org.junit.runner.RunWith
+
+@RunWith(AndroidJUnit4ClassRunner::class)
+class PickersTest {
+ @Rule
+ @JvmField
+ val rule = ActivityTestRule(PickersActivity::class.java)
+
+ @Test
+ fun testTextInputLayout() {
+ Screen.onScreen<PickersActivityScreen> {
+ selectTimeButton {
+ click()
+ }
+
+ timePicker {
+ setTime(22, 4)
+ }
+
+ timeText {
+ hasText("22:4")
+ }
+
+ selectDateButton {
+ click()
+ }
+
+ datePicker {
+ setDate(1955, 11, 12)
+ }
+
+ dateText {
+ hasText("12 11 1955")
+ }
+ }
+ }
+}
udiff-l:
--- a/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
+++ b/sample/src/androidTest/kotlin/com/agoda/sample/PickersTest.kt
@@ -2 +2,44 @@
CON
ADD import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
ADD import androidx.test.rule.ActivityTestRule
ADD import com.agoda.kakao.screen.Screen
ADD import com.agoda.sample.screen.PickersActivityScreen
ADD import org.junit.Rule
ADD import org.junit.Test
ADD import org.junit.runner.RunWith
ADD
ADD @RunWith(AndroidJUnit4ClassRunner::class)
ADD class PickersTest {
ADD @Rule
ADD @JvmField
ADD val rule = ActivityTestRule(PickersActivity::class.java)
ADD
ADD @Test
ADD fun testTextInputLayout() {
ADD Screen.onScreen<PickersActivityScreen> {
ADD selectTimeButton {
ADD click()
ADD }
ADD
ADD timePicker {
ADD setTime(22, 4)
ADD }
ADD
ADD timeText {
ADD hasText("22:4")
ADD }
ADD
ADD selectDateButton {
ADD click()
ADD }
ADD
ADD datePicker {
ADD setDate(1955, 11, 12)
ADD }
ADD
ADD dateText {
ADD hasText("12 11 1955")
ADD }
ADD }
ADD }
ADD }
search-replace:
<<<<<<< SEARCH
package com.agoda.sample
=======
package com.agoda.sample
import androidx.test.internal.runner.junit4.AndroidJUnit4ClassRunner
import androidx.test.rule.ActivityTestRule
import com.agoda.kakao.screen.Screen
import com.agoda.sample.screen.PickersActivityScreen
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith
@RunWith(AndroidJUnit4ClassRunner::class)
class PickersTest {
@Rule
@JvmField
val rule = ActivityTestRule(PickersActivity::class.java)
@Test
fun testTextInputLayout() {
Screen.onScreen<PickersActivityScreen> {
selectTimeButton {
click()
}
timePicker {
setTime(22, 4)
}
timeText {
hasText("22:4")
}
selectDateButton {
click()
}
datePicker {
setDate(1955, 11, 12)
}
dateText {
hasText("12 11 1955")
}
}
}
}
>>>>>>> REPLACE
|
repo: garystafford/voter-service | commit: f48164f5b521205989b840c5dcf76e9fc43f4c56
path: src/main/java/com/example/voter/HostInfoContributor.java
lang: java | license: apache-2.0
message: Add Mongo host info to info endpoint
old_code:
package com.example.voter;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
@Component
public class HostInfoContributor implements InfoContributor {
@Override
public void contribute(Info.Builder builder) {
InetAddress ip = null;
Map<String, String> hostMap = new HashMap<>();
try {
ip = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
e.printStackTrace();
}
hostMap.put("ipAddress", ip.getHostAddress());
hostMap.put("hostname", ip.getHostName());
builder.withDetail("hostInfo", hostMap);
}
}
new_code:
package com.example.voter;
import com.mongodb.CommandResult;
import com.mongodb.MongoClient;
import org.bson.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@Component
public class HostInfoContributor implements InfoContributor {
@Autowired
private MongoTemplate mongoTemplate;
@Override
public void contribute(Info.Builder builder) {
InetAddress ip = null;
Map<String, String> hostMap = new HashMap<>();
try {
ip = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
e.printStackTrace();
}
hostMap.put("ipAddress", ip.getHostAddress());
hostMap.put("hostname", ip.getHostName());
builder.withDetail("appHostInfo", hostMap);
hostMap = new HashMap<>();
CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }");
hostMap.put("hostname", commandResult.getString("host"));
MongoClient mongoClient = new MongoClient();
Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE));
hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString());
builder.withDetail("mongoDbHostInfo", hostMap);
}
}
n_added: 20 | n_removed: 1 | n_hunks: 4 | change_kind: mixed
udiff:
--- a/src/main/java/com/example/voter/HostInfoContributor.java
+++ b/src/main/java/com/example/voter/HostInfoContributor.java
@@ -2,4 +2,9 @@
+import com.mongodb.CommandResult;
+import com.mongodb.MongoClient;
+import org.bson.Document;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
+import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Component;
@@ -8,2 +13,3 @@
import java.net.UnknownHostException;
+import java.util.ArrayList;
import java.util.HashMap;
@@ -13,2 +19,5 @@
public class HostInfoContributor implements InfoContributor {
+
+ @Autowired
+ private MongoTemplate mongoTemplate;
@@ -26,3 +35,13 @@
hostMap.put("hostname", ip.getHostName());
- builder.withDetail("hostInfo", hostMap);
+ builder.withDetail("appHostInfo", hostMap);
+
+ hostMap = new HashMap<>();
+ CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }");
+ hostMap.put("hostname", commandResult.getString("host"));
+
+ MongoClient mongoClient = new MongoClient();
+ Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE));
+
+ hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString());
+ builder.withDetail("mongoDbHostInfo", hostMap);
}
udiff-h:
--- a/src/main/java/com/example/voter/HostInfoContributor.java
+++ b/src/main/java/com/example/voter/HostInfoContributor.java
@@ ... @@
+import com.mongodb.CommandResult;
+import com.mongodb.MongoClient;
+import org.bson.Document;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
+import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Component;
@@ ... @@
import java.net.UnknownHostException;
+import java.util.ArrayList;
import java.util.HashMap;
@@ ... @@
public class HostInfoContributor implements InfoContributor {
+
+ @Autowired
+ private MongoTemplate mongoTemplate;
@@ ... @@
hostMap.put("hostname", ip.getHostName());
- builder.withDetail("hostInfo", hostMap);
+ builder.withDetail("appHostInfo", hostMap);
+
+ hostMap = new HashMap<>();
+ CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }");
+ hostMap.put("hostname", commandResult.getString("host"));
+
+ MongoClient mongoClient = new MongoClient();
+ Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE));
+
+ hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString());
+ builder.withDetail("mongoDbHostInfo", hostMap);
}
udiff-l:
--- a/src/main/java/com/example/voter/HostInfoContributor.java
+++ b/src/main/java/com/example/voter/HostInfoContributor.java
@@ -2,4 +2,9 @@
CON
ADD import com.mongodb.CommandResult;
ADD import com.mongodb.MongoClient;
ADD import org.bson.Document;
ADD import org.springframework.beans.factory.annotation.Autowired;
CON import org.springframework.boot.actuate.info.Info;
CON import org.springframework.boot.actuate.info.InfoContributor;
ADD import org.springframework.data.mongodb.core.MongoTemplate;
CON import org.springframework.stereotype.Component;
@@ -8,2 +13,3 @@
CON import java.net.UnknownHostException;
ADD import java.util.ArrayList;
CON import java.util.HashMap;
@@ -13,2 +19,5 @@
CON public class HostInfoContributor implements InfoContributor {
ADD
ADD @Autowired
ADD private MongoTemplate mongoTemplate;
CON
@@ -26,3 +35,13 @@
CON hostMap.put("hostname", ip.getHostName());
DEL builder.withDetail("hostInfo", hostMap);
ADD builder.withDetail("appHostInfo", hostMap);
ADD
ADD hostMap = new HashMap<>();
ADD CommandResult commandResult = this.mongoTemplate.executeCommand("{ serverStatus: 1 }");
ADD hostMap.put("hostname", commandResult.getString("host"));
ADD
ADD MongoClient mongoClient = new MongoClient();
ADD Document buildInfo = mongoClient.getDatabase("admin").runCommand(new Document("currentOp", Boolean.TRUE));
ADD
ADD hostMap.put("currentOp", buildInfo.get("inprog", ArrayList.class).get(0).toString());
ADD builder.withDetail("mongoDbHostInfo", hostMap);
CON }
search-replace:
<<<<<<< SEARCH
package com.example.voter;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
@Component
public class HostInfoContributor implements InfoContributor {
@Override
=======
package com.example.voter;
import com.mongodb.CommandResult;
import com.mongodb.MongoClient;
import org.bson.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.info.Info;
import org.springframework.boot.actuate.info.InfoContributor;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Component;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@Component
public class HostInfoContributor implements InfoContributor {
@Autowired
private MongoTemplate mongoTemplate;
@Override
>>>>>>> REPLACE
|
repo: mindriot101/rust-fitsio | commit: deabbce2100cb976a5acd9f4630193c34b79b002
path: fitsio/src/columndescription.rs
lang: rust | license: apache-2.0
message: Add implementations of data description types
old_code:
/// Description for new columns
#[derive(Debug, Clone)]
pub struct ColumnDescription {
pub name: String,
// TODO: make this use one of the enums
/// Type of the data, see the cfitsio documentation
pub data_type: String,
}
new_code:
/// Description for new columns
#[derive(Debug, Clone)]
pub struct ColumnDescription {
pub name: String,
// TODO: make this use one of the enums
/// Type of the data, see the cfitsio documentation
pub data_type: String,
}
#[derive(Debug, Clone)]
pub struct ColumnDataDescription {
pub repeat: usize,
pub width: usize,
pub typ: ColumnDataType,
}
impl ColumnDataDescription {
pub fn new(typ: ColumnDataType) -> Self {
ColumnDataDescription {
repeat: 1,
width: 1,
typ: typ,
}
}
/* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
* this means that memory is leaked, or that destructors are needlessly called (I suspect the
* latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
* be called in performance-critical code, and is more likely a one-dime definition. I will
* leave it for now - SRW 2017-03-07
* */
pub fn repeats(&mut self, repeat: usize) -> Self {
// TODO check that repeat >= 1
self.repeat = repeat;
self.clone()
}
pub fn width(&mut self, width: usize) -> Self {
// TODO check that width >= 1
self.width = width;
self.clone()
}
}
#[derive(Debug, Clone, Copy)]
pub enum ColumnDataType {
Int,
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_column_data_descriptions_builder_pattern() {
let desc = ColumnDataDescription::new(ColumnDataType::Int)
.width(100)
.repeats(5);
assert_eq!(desc.repeat, 5);
assert_eq!(desc.width, 100);
}
}
n_added: 54 | n_removed: 0 | n_hunks: 1 | change_kind: add_only
udiff:
--- a/fitsio/src/columndescription.rs
+++ b/fitsio/src/columndescription.rs
@@ -9 +9,55 @@
}
+
+#[derive(Debug, Clone)]
+pub struct ColumnDataDescription {
+ pub repeat: usize,
+ pub width: usize,
+ pub typ: ColumnDataType,
+}
+
+impl ColumnDataDescription {
+ pub fn new(typ: ColumnDataType) -> Self {
+ ColumnDataDescription {
+ repeat: 1,
+ width: 1,
+ typ: typ,
+ }
+ }
+
+ /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
+ * this means that memory is leaked, or that destructors are needlessly called (I suspect the
+ * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
+ * be called in performance-critical code, and is more likely a one-dime definition. I will
+ * leave it for now - SRW 2017-03-07
+ * */
+ pub fn repeats(&mut self, repeat: usize) -> Self {
+ // TODO check that repeat >= 1
+ self.repeat = repeat;
+ self.clone()
+ }
+
+ pub fn width(&mut self, width: usize) -> Self {
+ // TODO check that width >= 1
+ self.width = width;
+ self.clone()
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum ColumnDataType {
+ Int,
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_column_data_descriptions_builder_pattern() {
+ let desc = ColumnDataDescription::new(ColumnDataType::Int)
+ .width(100)
+ .repeats(5);
+ assert_eq!(desc.repeat, 5);
+ assert_eq!(desc.width, 100);
+ }
+}
udiff-h:
--- a/fitsio/src/columndescription.rs
+++ b/fitsio/src/columndescription.rs
@@ ... @@
}
+
+#[derive(Debug, Clone)]
+pub struct ColumnDataDescription {
+ pub repeat: usize,
+ pub width: usize,
+ pub typ: ColumnDataType,
+}
+
+impl ColumnDataDescription {
+ pub fn new(typ: ColumnDataType) -> Self {
+ ColumnDataDescription {
+ repeat: 1,
+ width: 1,
+ typ: typ,
+ }
+ }
+
+ /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
+ * this means that memory is leaked, or that destructors are needlessly called (I suspect the
+ * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
+ * be called in performance-critical code, and is more likely a one-dime definition. I will
+ * leave it for now - SRW 2017-03-07
+ * */
+ pub fn repeats(&mut self, repeat: usize) -> Self {
+ // TODO check that repeat >= 1
+ self.repeat = repeat;
+ self.clone()
+ }
+
+ pub fn width(&mut self, width: usize) -> Self {
+ // TODO check that width >= 1
+ self.width = width;
+ self.clone()
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub enum ColumnDataType {
+ Int,
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_column_data_descriptions_builder_pattern() {
+ let desc = ColumnDataDescription::new(ColumnDataType::Int)
+ .width(100)
+ .repeats(5);
+ assert_eq!(desc.repeat, 5);
+ assert_eq!(desc.width, 100);
+ }
+}
udiff-l:
--- a/fitsio/src/columndescription.rs
+++ b/fitsio/src/columndescription.rs
@@ -9 +9,55 @@
CON }
ADD
ADD #[derive(Debug, Clone)]
ADD pub struct ColumnDataDescription {
ADD pub repeat: usize,
ADD pub width: usize,
ADD pub typ: ColumnDataType,
ADD }
ADD
ADD impl ColumnDataDescription {
ADD pub fn new(typ: ColumnDataType) -> Self {
ADD ColumnDataDescription {
ADD repeat: 1,
ADD width: 1,
ADD typ: typ,
ADD }
ADD }
ADD
ADD /* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
ADD * this means that memory is leaked, or that destructors are needlessly called (I suspect the
ADD * latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
ADD * be called in performance-critical code, and is more likely a one-dime definition. I will
ADD * leave it for now - SRW 2017-03-07
ADD * */
ADD pub fn repeats(&mut self, repeat: usize) -> Self {
ADD // TODO check that repeat >= 1
ADD self.repeat = repeat;
ADD self.clone()
ADD }
ADD
ADD pub fn width(&mut self, width: usize) -> Self {
ADD // TODO check that width >= 1
ADD self.width = width;
ADD self.clone()
ADD }
ADD }
ADD
ADD #[derive(Debug, Clone, Copy)]
ADD pub enum ColumnDataType {
ADD Int,
ADD }
ADD
ADD #[cfg(test)]
ADD mod test {
ADD use super::*;
ADD
ADD #[test]
ADD fn test_column_data_descriptions_builder_pattern() {
ADD let desc = ColumnDataDescription::new(ColumnDataType::Int)
ADD .width(100)
ADD .repeats(5);
ADD assert_eq!(desc.repeat, 5);
ADD assert_eq!(desc.width, 100);
ADD }
ADD }
search-replace:
<<<<<<< SEARCH
pub data_type: String,
}
=======
pub data_type: String,
}
#[derive(Debug, Clone)]
pub struct ColumnDataDescription {
pub repeat: usize,
pub width: usize,
pub typ: ColumnDataType,
}
impl ColumnDataDescription {
pub fn new(typ: ColumnDataType) -> Self {
ColumnDataDescription {
repeat: 1,
width: 1,
typ: typ,
}
}
/* XXX These two methods force a call to clone which is wasteful of memory. I do not know if
* this means that memory is leaked, or that destructors are needlessly called (I suspect the
* latter) but it is fairly wasteful. On the other hand, it's unlikely this sort of thing will
* be called in performance-critical code, and is more likely a one-dime definition. I will
* leave it for now - SRW 2017-03-07
* */
pub fn repeats(&mut self, repeat: usize) -> Self {
// TODO check that repeat >= 1
self.repeat = repeat;
self.clone()
}
pub fn width(&mut self, width: usize) -> Self {
// TODO check that width >= 1
self.width = width;
self.clone()
}
}
#[derive(Debug, Clone, Copy)]
pub enum ColumnDataType {
Int,
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_column_data_descriptions_builder_pattern() {
let desc = ColumnDataDescription::new(ColumnDataType::Int)
.width(100)
.repeats(5);
assert_eq!(desc.repeat, 5);
assert_eq!(desc.width, 100);
}
}
>>>>>>> REPLACE
|
repo: Jitsusama/lets-do-dns | commit: f83282b1747e255d35e18e9fecad1750d1564f9e
path: do_record/record.py
lang: python | license: apache-2.0
message: Remove Code That Doesn't Have a Test
old_code:
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
if self.number is None:
self._number = value
else:
raise ValueError(
'Cannot externally reset a record\'s number identifier.')
new_code:
"""DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
self._number = value
n_added: 1 | n_removed: 5 | n_hunks: 1 | change_kind: mixed
udiff:
--- a/do_record/record.py
+++ b/do_record/record.py
@@ -35,6 +35,2 @@
def number(self, value):
- if self.number is None:
- self._number = value
- else:
- raise ValueError(
- 'Cannot externally reset a record\'s number identifier.')
+ self._number = value
udiff-h:
--- a/do_record/record.py
+++ b/do_record/record.py
@@ ... @@
def number(self, value):
- if self.number is None:
- self._number = value
- else:
- raise ValueError(
- 'Cannot externally reset a record\'s number identifier.')
+ self._number = value
udiff-l:
--- a/do_record/record.py
+++ b/do_record/record.py
@@ -35,6 +35,2 @@
CON def number(self, value):
DEL if self.number is None:
DEL self._number = value
DEL else:
DEL raise ValueError(
DEL 'Cannot externally reset a record\'s number identifier.')
ADD self._number = value
search-replace:
<<<<<<< SEARCH
@number.setter
def number(self, value):
if self.number is None:
self._number = value
else:
raise ValueError(
'Cannot externally reset a record\'s number identifier.')
=======
@number.setter
def number(self, value):
self._number = value
>>>>>>> REPLACE
|
repo: jieter/python-lora | commit: e5fe2994b05ffbb5abca5641ae75114da315e888
path: setup.py
lang: python | license: mit
message: Use twine to upload package
old_code:
#!/usr/bin/env python
import os
import sys
from setuptools import setup
from lora import VERSION
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
if sys.argv[-1] == 'tag':
os.system("git tag -a v{} -m 'tagging v{}'".format(VERSION, VERSION))
os.system('git push && git push --tags')
sys.exit()
setup(
name='python-lora',
version=VERSION,
description='Decrypt LoRa payloads',
url='https://github.com/jieter/python-lora',
author='Jan Pieter Waagmeester',
author_email='[email protected]',
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='LoRa decrypt',
packages=['lora'],
install_requires=[
'cryptography==1.5.2'
],
)
new_code:
#!/usr/bin/env python
import os
import sys
from setuptools import setup
from lora import VERSION
package_name = 'python-lora'
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist')
os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
sys.exit()
if sys.argv[-1] == 'tag':
os.system("git tag -a v{} -m 'tagging v{}'".format(VERSION, VERSION))
os.system('git push && git push --tags')
sys.exit()
setup(
name='python-lora',
version=VERSION,
description='Decrypt LoRa payloads',
url='https://github.com/jieter/python-lora',
author='Jan Pieter Waagmeester',
author_email='[email protected]',
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='LoRa decrypt',
packages=['lora'],
install_requires=[
'cryptography==1.5.2'
],
)
n_added: 4 | n_removed: 1 | n_hunks: 1 | change_kind: mixed
udiff:
--- a/setup.py
+++ b/setup.py
@@ -9,4 +9,7 @@
+package_name = 'python-lora'
+
if sys.argv[-1] == 'publish':
- os.system('python setup.py sdist upload')
+ os.system('python setup.py sdist')
+ os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
sys.exit()
udiff-h:
--- a/setup.py
+++ b/setup.py
@@ ... @@
+package_name = 'python-lora'
+
if sys.argv[-1] == 'publish':
- os.system('python setup.py sdist upload')
+ os.system('python setup.py sdist')
+ os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
sys.exit()
udiff-l:
--- a/setup.py
+++ b/setup.py
@@ -9,4 +9,7 @@
CON
ADD package_name = 'python-lora'
ADD
CON if sys.argv[-1] == 'publish':
DEL os.system('python setup.py sdist upload')
ADD os.system('python setup.py sdist')
ADD os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
CON sys.exit()
search-replace:
<<<<<<< SEARCH
from lora import VERSION
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
=======
from lora import VERSION
package_name = 'python-lora'
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist')
os.system('twine upload -r pypi dist/%s-%s.tar.gz' % (package_name, VERSION))
sys.exit()
>>>>>>> REPLACE
|
repo: rrussell39/selenium | commit: c0470d7f93fab4bff5364a2d5e55250075cd79df
path: selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
lang: java | license: apache-2.0
message: SimonStewart: Make the selenium-backed webdriver emulate the normal webdriver's xpath mode (r10674)
old_code:
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.internal.selenesedriver;
import com.thoughtworks.selenium.Selenium;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;
import java.util.HashMap;
import java.util.Map;
public class NewSession implements SeleneseFunction<Map<String, Object>> {
public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
selenium.start();
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
Map<String, Object> seenCapabilities = new HashMap<String, Object>();
seenCapabilities.put("browserName", capabilities.getBrowserName());
seenCapabilities.put("version", capabilities.getVersion());
seenCapabilities.put("platform", Platform.getCurrent().toString());
seenCapabilities.put("javascriptEnabled", true);
return seenCapabilities;
}
}
new_code:
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.internal.selenesedriver;
import com.thoughtworks.selenium.Selenium;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.Platform;
import java.util.HashMap;
import java.util.Map;
public class NewSession implements SeleneseFunction<Map<String, Object>> {
public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
selenium.start();
// Emulate behaviour of webdriver
selenium.useXpathLibrary("javascript-xpath");
selenium.allowNativeXpath("true");
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
Map<String, Object> seenCapabilities = new HashMap<String, Object>();
seenCapabilities.put("browserName", capabilities.getBrowserName());
seenCapabilities.put("version", capabilities.getVersion());
seenCapabilities.put("platform", Platform.getCurrent().toString());
seenCapabilities.put("javascriptEnabled", true);
return seenCapabilities;
}
}
n_added: 5 | n_removed: 0 | n_hunks: 1 | change_kind: add_only
udiff:
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
+++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
@@ -29,2 +29,7 @@
selenium.start();
+
+ // Emulate behaviour of webdriver
+ selenium.useXpathLibrary("javascript-xpath");
+ selenium.allowNativeXpath("true");
+
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
udiff-h:
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
+++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
@@ ... @@
selenium.start();
+
+ // Emulate behaviour of webdriver
+ selenium.useXpathLibrary("javascript-xpath");
+ selenium.allowNativeXpath("true");
+
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
udiff-l:
--- a/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
+++ b/selenium/src/java/org/openqa/selenium/internal/selenesedriver/NewSession.java
@@ -29,2 +29,7 @@
CON selenium.start();
ADD
ADD // Emulate behaviour of webdriver
ADD selenium.useXpathLibrary("javascript-xpath");
ADD selenium.allowNativeXpath("true");
ADD
CON Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
search-replace:
<<<<<<< SEARCH
public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
selenium.start();
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
Map<String, Object> seenCapabilities = new HashMap<String, Object>();
=======
public Map<String, Object> apply(Selenium selenium, Map<String, ?> args) {
selenium.start();
// Emulate behaviour of webdriver
selenium.useXpathLibrary("javascript-xpath");
selenium.allowNativeXpath("true");
Capabilities capabilities = (Capabilities) args.get("desiredCapabilities");
Map<String, Object> seenCapabilities = new HashMap<String, Object>();
>>>>>>> REPLACE
|
repo: endoli/hostinfo.rs | commit: eb0f2ee0f33a360cc38b086a86f27b092ea95adb
path: src/lib.rs
lang: rust | license: apache-2.0
message: Include core_foundation crate on macOS only.
old_code:
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Host Info
//!
//! This crate makes available information about the computer,
//! operating system and software that is running.
//!
//! All of the functionality is presented by traits which are
//! implemented by a `HostInfo` struct.
//!
//! ```rust
//! use hostinfo::{HostInfo, UptimeInfo};
//!
//! let hi = HostInfo::new();
//! let uptime = hi.uptime();
//! ```
#![warn(missing_docs)]
#![deny(trivial_numeric_casts, unstable_features,
unused_import_braces, unused_qualifications)]
extern crate core_foundation;
extern crate libc;
mod sys;
pub use self::sys::HostInfo;
mod hardwareinfo;
pub use self::hardwareinfo::HardwareInfo;
mod kernellimits;
pub use self::kernellimits::KernelLimits;
mod powerinfo;
pub use self::powerinfo::{BatteryInfo, PowerInfo, PowerSource};
mod swapinfo;
pub use self::swapinfo::{SwapInfo, SwapUsage};
mod uptimeinfo;
pub use self::uptimeinfo::UptimeInfo;
new_code:
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Host Info
//!
//! This crate makes available information about the computer,
//! operating system and software that is running.
//!
//! All of the functionality is presented by traits which are
//! implemented by a `HostInfo` struct.
//!
//! ```rust
//! use hostinfo::{HostInfo, UptimeInfo};
//!
//! let hi = HostInfo::new();
//! let uptime = hi.uptime();
//! ```
#![warn(missing_docs)]
#![deny(trivial_numeric_casts, unstable_features,
unused_import_braces, unused_qualifications)]
#[cfg(target_os = "macos")]
extern crate core_foundation;
extern crate libc;
mod sys;
pub use self::sys::HostInfo;
mod hardwareinfo;
pub use self::hardwareinfo::HardwareInfo;
mod kernellimits;
pub use self::kernellimits::KernelLimits;
mod powerinfo;
pub use self::powerinfo::{BatteryInfo, PowerInfo, PowerSource};
mod swapinfo;
pub use self::swapinfo::{SwapInfo, SwapUsage};
mod uptimeinfo;
pub use self::uptimeinfo::UptimeInfo;
n_added: 1 | n_removed: 0 | n_hunks: 1 | change_kind: add_only
udiff:
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,2 +24,3 @@
+#[cfg(target_os = "macos")]
extern crate core_foundation;
udiff-h:
--- a/src/lib.rs
+++ b/src/lib.rs
@@ ... @@
+#[cfg(target_os = "macos")]
extern crate core_foundation;
udiff-l:
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,2 +24,3 @@
CON
ADD #[cfg(target_os = "macos")]
CON extern crate core_foundation;
search-replace:
<<<<<<< SEARCH
unused_import_braces, unused_qualifications)]
extern crate core_foundation;
extern crate libc;
=======
unused_import_braces, unused_qualifications)]
#[cfg(target_os = "macos")]
extern crate core_foundation;
extern crate libc;
>>>>>>> REPLACE
|
repo: tock/libtock-rs | commit: c877a6c44324b0135017ab1e1300c0d666746c84
path: core/platform/src/syscalls_impl.rs
lang: rust | license: apache-2.0
message: Move yield_no_wait's flag init out of `unsafe`. Turns out core::mem::MaybeUninit::uninit is not `unsafe`!
old_code:
//! Implements `Syscalls` for all types that implement `RawSyscalls`.
use crate::{RawSyscalls, Syscalls, YieldNoWaitReturn};
mod yield_op {
pub const NO_WAIT: u32 = 0;
pub const WAIT: u32 = 1;
}
impl<S: RawSyscalls> Syscalls for S {
// -------------------------------------------------------------------------
// Yield
// -------------------------------------------------------------------------
fn yield_no_wait() -> YieldNoWaitReturn {
unsafe {
// flag can be uninitialized because it is not read before the yield
// system call, and the kernel promises to only write to it (not
// read it).
let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
// flag is safe to write a YieldNoWaitReturn to, as guaranteed by
// MaybeUninit.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
// yield-no-wait guarantees it sets (initializes) flag before
// returning.
flag.assume_init()
}
}
fn yield_wait() {
// Safety: yield-wait does not return a value, which satisfies yield1's
// requirement. The yield-wait system call cannot trigger undefined
// behavior on its own in any other way.
unsafe {
Self::yield1(yield_op::WAIT as *mut ());
}
}
}
new_code:
//! Implements `Syscalls` for all types that implement `RawSyscalls`.
use crate::{RawSyscalls, Syscalls, YieldNoWaitReturn};
mod yield_op {
pub const NO_WAIT: u32 = 0;
pub const WAIT: u32 = 1;
}
impl<S: RawSyscalls> Syscalls for S {
// -------------------------------------------------------------------------
// Yield
// -------------------------------------------------------------------------
fn yield_no_wait() -> YieldNoWaitReturn {
let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
unsafe {
// Flag can be uninitialized here because the kernel promises to
// only write to it, not read from it. MaybeUninit guarantees that
// it is safe to write a YieldNoWaitReturn into it.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
// yield-no-wait guarantees it sets (initializes) flag before
// returning.
flag.assume_init()
}
}
fn yield_wait() {
// Safety: yield-wait does not return a value, which satisfies yield1's
// requirement. The yield-wait system call cannot trigger undefined
// behavior on its own in any other way.
unsafe {
Self::yield1(yield_op::WAIT as *mut ());
}
}
}
n_added: 5 | n_removed: 7 | n_hunks: 1 | change_kind: mixed
udiff:
--- a/core/platform/src/syscalls_impl.rs
+++ b/core/platform/src/syscalls_impl.rs
@@ -15,10 +15,8 @@
fn yield_no_wait() -> YieldNoWaitReturn {
+ let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
+
unsafe {
- // flag can be uninitialized because it is not read before the yield
- // system call, and the kernel promises to only write to it (not
- // read it).
- let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
-
- // flag is safe to write a YieldNoWaitReturn to, as guaranteed by
- // MaybeUninit.
+ // Flag can be uninitialized here because the kernel promises to
+ // only write to it, not read from it. MaybeUninit guarantees that
+ // it is safe to write a YieldNoWaitReturn into it.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
udiff-h:
--- a/core/platform/src/syscalls_impl.rs
+++ b/core/platform/src/syscalls_impl.rs
@@ ... @@
fn yield_no_wait() -> YieldNoWaitReturn {
+ let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
+
unsafe {
- // flag can be uninitialized because it is not read before the yield
- // system call, and the kernel promises to only write to it (not
- // read it).
- let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
-
- // flag is safe to write a YieldNoWaitReturn to, as guaranteed by
- // MaybeUninit.
+ // Flag can be uninitialized here because the kernel promises to
+ // only write to it, not read from it. MaybeUninit guarantees that
+ // it is safe to write a YieldNoWaitReturn into it.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
udiff-l:
--- a/core/platform/src/syscalls_impl.rs
+++ b/core/platform/src/syscalls_impl.rs
@@ -15,10 +15,8 @@
CON fn yield_no_wait() -> YieldNoWaitReturn {
ADD let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
ADD
CON unsafe {
DEL // flag can be uninitialized because it is not read before the yield
DEL // system call, and the kernel promises to only write to it (not
DEL // read it).
DEL let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
DEL
DEL // flag is safe to write a YieldNoWaitReturn to, as guaranteed by
DEL // MaybeUninit.
ADD // Flag can be uninitialized here because the kernel promises to
ADD // only write to it, not read from it. MaybeUninit guarantees that
ADD // it is safe to write a YieldNoWaitReturn into it.
CON Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
search-replace:
<<<<<<< SEARCH
fn yield_no_wait() -> YieldNoWaitReturn {
unsafe {
// flag can be uninitialized because it is not read before the yield
// system call, and the kernel promises to only write to it (not
// read it).
let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
// flag is safe to write a YieldNoWaitReturn to, as guaranteed by
// MaybeUninit.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
=======
fn yield_no_wait() -> YieldNoWaitReturn {
let mut flag = core::mem::MaybeUninit::<YieldNoWaitReturn>::uninit();
unsafe {
// Flag can be uninitialized here because the kernel promises to
// only write to it, not read from it. MaybeUninit guarantees that
// it is safe to write a YieldNoWaitReturn into it.
Self::yield2(yield_op::NO_WAIT as *mut (), flag.as_mut_ptr() as *mut ());
>>>>>>> REPLACE
|
repo: hughrawlinson/meyda | commit: 5ad80c5925dc6b47510fb238d33439192500602c
path: webpack.config.js
lang: javascript | license: mit
message: Change libraryTarget to UMD, remove add-module-export plugin
old_code:
var path = require('path');
var webpack = require('webpack');
module.exports = {
regular: {
devtool: 'source-map',
output: {
filename: 'meyda.js',
library: 'Meyda',
libraryTarget: 'var'
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
options: {
presets: ['es2015'],
plugins: [
'add-module-exports'
]
}
}
]
}
},
minified: {
devtool: 'source-map',
output: {
filename: 'meyda.min.js',
sourceMapFilename: 'meyda.min.map',
library: 'Meyda',
libraryTarget: 'var'
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
options: {
presets: ['es2015'],
plugins: [
'add-module-exports'
]
}
}
]
},
plugins: [
new webpack.optimize.UglifyJsPlugin({
compress: {
warnings: true,
drop_console: false
},
sourceMap: true
})
]
}
};
new_code:
var path = require('path');
var webpack = require('webpack');
module.exports = {
regular: {
devtool: 'source-map',
output: {
filename: 'meyda.js',
library: 'Meyda',
libraryTarget: 'umd'
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
options: {
presets: [['es2015', {modules: false}]]
}
}
]
}
},
minified: {
devtool: 'source-map',
output: {
filename: 'meyda.min.js',
sourceMapFilename: 'meyda.min.map',
library: 'Meyda',
libraryTarget: 'umd'
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader',
options: {
presets: [['es2015', {modules: false}]]
}
}
]
},
plugins: [
new webpack.optimize.UglifyJsPlugin({
compress: {
warnings: true,
drop_console: false
},
sourceMap: true
})
]
}
};
n_added: 4 | n_removed: 10 | n_hunks: 4 | change_kind: mixed
udiff:
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -9,3 +9,3 @@
library: 'Meyda',
- libraryTarget: 'var'
+ libraryTarget: 'umd'
},
@@ -18,6 +18,3 @@
options: {
- presets: ['es2015'],
- plugins: [
- 'add-module-exports'
- ]
+ presets: [['es2015', {modules: false}]]
}
@@ -33,3 +30,3 @@
library: 'Meyda',
- libraryTarget: 'var'
+ libraryTarget: 'umd'
},
@@ -42,6 +39,3 @@
options: {
- presets: ['es2015'],
- plugins: [
- 'add-module-exports'
- ]
+ presets: [['es2015', {modules: false}]]
}
udiff-h:
--- a/webpack.config.js
+++ b/webpack.config.js
@@ ... @@
library: 'Meyda',
- libraryTarget: 'var'
+ libraryTarget: 'umd'
},
@@ ... @@
options: {
- presets: ['es2015'],
- plugins: [
- 'add-module-exports'
- ]
+ presets: [['es2015', {modules: false}]]
}
@@ ... @@
library: 'Meyda',
- libraryTarget: 'var'
+ libraryTarget: 'umd'
},
@@ ... @@
options: {
- presets: ['es2015'],
- plugins: [
- 'add-module-exports'
- ]
+ presets: [['es2015', {modules: false}]]
}
udiff-l:
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -9,3 +9,3 @@
CON library: 'Meyda',
DEL libraryTarget: 'var'
ADD libraryTarget: 'umd'
CON },
@@ -18,6 +18,3 @@
CON options: {
DEL presets: ['es2015'],
DEL plugins: [
DEL 'add-module-exports'
DEL ]
ADD presets: [['es2015', {modules: false}]]
CON }
@@ -33,3 +30,3 @@
CON library: 'Meyda',
DEL libraryTarget: 'var'
ADD libraryTarget: 'umd'
CON },
@@ -42,6 +39,3 @@
CON options: {
DEL presets: ['es2015'],
DEL plugins: [
DEL 'add-module-exports'
DEL ]
ADD presets: [['es2015', {modules: false}]]
CON }
search-replace:
<<<<<<< SEARCH
filename: 'meyda.js',
library: 'Meyda',
libraryTarget: 'var'
},
module: {
=======
filename: 'meyda.js',
library: 'Meyda',
libraryTarget: 'umd'
},
module: {
>>>>>>> REPLACE
|
repo: jmeas/api-pls | commit: acbd1a7b82d756ffd553d8ec23c402e8e6a42571
path: server/api.js
lang: javascript | license: mit
message: Add 404 handler to unhandled requests
old_code:
'use strict';
const express = require('express');
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const loadResources = require('./util/load-resources');
module.exports = function() {
const router = express.Router();
// This version needs to be externalized
var apiVersion = 1;
var resources = loadResources()
.map(resource => new Resource({
version: apiVersion,
resource
}));
// Configure routes for our resources.
resources.forEach(resource =>
router.use(routeBuilder(
express.Router(),
resource.routes,
resource.location
))
);
// Set up the root route that describes the available endpoints.
router.get('/', (req, res) => {
res.send({
version: 'v1',
endpoints: resources.map(resource => {
return {
route: resource.location,
methods: Object.keys(resource.routes)
};
})
});
});
return router;
};
new_code:
'use strict';
const express = require('express');
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const serverErrors = require('./util/server-errors');
const loadResources = require('./util/load-resources');
module.exports = function() {
const router = express.Router();
// This version needs to be externalized
var apiVersion = 1;
var resources = loadResources()
.map(resource => new Resource({
version: apiVersion,
resource
}));
// Configure routes for our resources.
resources.forEach(resource =>
router.use(routeBuilder(
express.Router(),
resource.routes,
resource.location
))
);
// Set up the root route that describes the available endpoints.
router.get('/', (req, res) => {
res.send({
version: 'v1',
endpoints: resources.map(resource => {
return {
route: resource.location,
methods: Object.keys(resource.routes)
};
})
});
});
// All other requests get a default 404 error.
router.get('*', (req, res) => {
res.status(serverErrors.notFound.code).send({
errors: [serverErrors.notFound.body()]
});
})
return router;
};
n_added: 8 | n_removed: 0 | n_hunks: 2 | change_kind: add_only
udiff:
--- a/server/api.js
+++ b/server/api.js
@@ -5,2 +5,3 @@
const Resource = require('./resource');
+const serverErrors = require('./util/server-errors');
const loadResources = require('./util/load-resources');
@@ -41,2 +42,9 @@
+ // All other requests get a default 404 error.
+ router.get('*', (req, res) => {
+ res.status(serverErrors.notFound.code).send({
+ errors: [serverErrors.notFound.body()]
+ });
+ })
+
return router;
udiff-h:
--- a/server/api.js
+++ b/server/api.js
@@ ... @@
const Resource = require('./resource');
+const serverErrors = require('./util/server-errors');
const loadResources = require('./util/load-resources');
@@ ... @@
+ // All other requests get a default 404 error.
+ router.get('*', (req, res) => {
+ res.status(serverErrors.notFound.code).send({
+ errors: [serverErrors.notFound.body()]
+ });
+ })
+
return router;
udiff-l:
--- a/server/api.js
+++ b/server/api.js
@@ -5,2 +5,3 @@
CON const Resource = require('./resource');
ADD const serverErrors = require('./util/server-errors');
CON const loadResources = require('./util/load-resources');
@@ -41,2 +42,9 @@
CON
ADD // All other requests get a default 404 error.
ADD router.get('*', (req, res) => {
ADD res.status(serverErrors.notFound.code).send({
ADD errors: [serverErrors.notFound.body()]
ADD });
ADD })
ADD
CON return router;
search-replace:
<<<<<<< SEARCH
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const loadResources = require('./util/load-resources');
=======
const routeBuilder = require('express-routebuilder');
const Resource = require('./resource');
const serverErrors = require('./util/server-errors');
const loadResources = require('./util/load-resources');
>>>>>>> REPLACE
|
repo: sigp/lighthouse | commit: dc0696754be06da921b2536af1771f9bdbc9d200
path: beacon_node/beacon_chain/test_harness/tests/chain.rs
lang: rust | license: apache-2.0
message: Raise log level on test_harness tests
old_code:
use env_logger::{Builder, Env};
use log::debug;
use test_harness::BeaconChainHarness;
use types::ChainSpec;
#[test]
fn it_can_build_on_genesis_block() {
Builder::from_env(Env::default().default_filter_or("trace")).init();
let spec = ChainSpec::few_validators();
let validator_count = 8;
let mut harness = BeaconChainHarness::new(spec, validator_count as usize);
harness.advance_chain_with_block();
}
#[test]
#[ignore]
fn it_can_produce_past_first_epoch_boundary() {
Builder::from_env(Env::default().default_filter_or("debug")).init();
let spec = ChainSpec::few_validators();
let validator_count = 8;
debug!("Starting harness build...");
let mut harness = BeaconChainHarness::new(spec, validator_count);
debug!("Harness built, tests starting..");
let blocks = harness.spec.epoch_length * 2 + 1;
for i in 0..blocks {
harness.advance_chain_with_block();
debug!("Produced block {}/{}.", i + 1, blocks);
}
let dump = harness.chain_dump().expect("Chain dump failed.");
assert_eq!(dump.len() as u64, blocks + 1); // + 1 for genesis block.
harness.dump_to_file("/tmp/chaindump.json".to_string(), &dump);
}
new_code:
use env_logger::{Builder, Env};
use log::debug;
use test_harness::BeaconChainHarness;
use types::ChainSpec;
#[test]
fn it_can_build_on_genesis_block() {
Builder::from_env(Env::default().default_filter_or("info")).init();
let spec = ChainSpec::few_validators();
let validator_count = 8;
let mut harness = BeaconChainHarness::new(spec, validator_count as usize);
harness.advance_chain_with_block();
}
#[test]
#[ignore]
fn it_can_produce_past_first_epoch_boundary() {
Builder::from_env(Env::default().default_filter_or("info")).init();
let spec = ChainSpec::few_validators();
let validator_count = 8;
debug!("Starting harness build...");
let mut harness = BeaconChainHarness::new(spec, validator_count);
debug!("Harness built, tests starting..");
let blocks = harness.spec.epoch_length * 2 + 1;
for i in 0..blocks {
harness.advance_chain_with_block();
debug!("Produced block {}/{}.", i + 1, blocks);
}
let dump = harness.chain_dump().expect("Chain dump failed.");
assert_eq!(dump.len() as u64, blocks + 1); // + 1 for genesis block.
harness.dump_to_file("/tmp/chaindump.json".to_string(), &dump);
}
n_added: 2 | n_removed: 2 | n_hunks: 2 | change_kind: mixed
udiff:
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs
+++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs
@@ -7,3 +7,3 @@
fn it_can_build_on_genesis_block() {
- Builder::from_env(Env::default().default_filter_or("trace")).init();
+ Builder::from_env(Env::default().default_filter_or("info")).init();
@@ -20,3 +20,3 @@
fn it_can_produce_past_first_epoch_boundary() {
- Builder::from_env(Env::default().default_filter_or("debug")).init();
+ Builder::from_env(Env::default().default_filter_or("info")).init();
udiff-h:
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs
+++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs
@@ ... @@
fn it_can_build_on_genesis_block() {
- Builder::from_env(Env::default().default_filter_or("trace")).init();
+ Builder::from_env(Env::default().default_filter_or("info")).init();
@@ ... @@
fn it_can_produce_past_first_epoch_boundary() {
- Builder::from_env(Env::default().default_filter_or("debug")).init();
+ Builder::from_env(Env::default().default_filter_or("info")).init();
udiff-l:
--- a/beacon_node/beacon_chain/test_harness/tests/chain.rs
+++ b/beacon_node/beacon_chain/test_harness/tests/chain.rs
@@ -7,3 +7,3 @@
CON fn it_can_build_on_genesis_block() {
DEL Builder::from_env(Env::default().default_filter_or("trace")).init();
ADD Builder::from_env(Env::default().default_filter_or("info")).init();
CON
@@ -20,3 +20,3 @@
CON fn it_can_produce_past_first_epoch_boundary() {
DEL Builder::from_env(Env::default().default_filter_or("debug")).init();
ADD Builder::from_env(Env::default().default_filter_or("info")).init();
CON
search-replace:
<<<<<<< SEARCH
#[test]
fn it_can_build_on_genesis_block() {
Builder::from_env(Env::default().default_filter_or("trace")).init();
let spec = ChainSpec::few_validators();
=======
#[test]
fn it_can_build_on_genesis_block() {
Builder::from_env(Env::default().default_filter_or("info")).init();
let spec = ChainSpec::few_validators();
>>>>>>> REPLACE
|
repo: dkrathi457/app | commit: 870800d5240e9fdc2f610204fd495c69bfae7f63
path: lib/forgot/forgot-view.js
lang: javascript | license: mit
message: Remove unneeded switchOff function from forgot view
old_code:
/*
* Module dependencies.
*/
var template = require('./forgot-form');
var t = require('t');
var FormView = require('form-view');
var page = require('page');
/**
* Expose ForgotView.
*/
module.exports = ForgotView;
/**
* Forgot password view
*
* @return {ForgotView} `ForgotView` instance.
* @api public
*/
function ForgotView() {
if (!(this instanceof ForgotView)) {
return new ForgotView();
};
FormView.call(this, template);
}
/**
* Extend from `FormView`
*/
FormView(ForgotView);
ForgotView.prototype.switchOn = function() {
this.on('success', this.bound('onsuccess'));
this.on('error', this.bound('onerror'));
};
ForgotView.prototype.switchoff = function() {
this.off('success', this.bound('onsuccess'));
this.off('error', this.bound('onerror'));
};
/**
* Show success message
*/
ForgotView.prototype.onsuccess = function() {
var form = this.find('form');
var explanation = this.find('p.explanation-message');
var success = this.find('p.success-message');
form.addClass('hide');
explanation.addClass('hide');
success.removeClass('hide');
}
/**
* Handle errors
*/
ForgotView.prototype.onerror = function(error) {
if ('notvalidated' === error.status) page('/signup/resend-validation-email');
};
new_code:
/*
* Module dependencies.
*/
var template = require('./forgot-form');
var t = require('t');
var FormView = require('form-view');
var page = require('page');
/**
* Expose ForgotView.
*/
module.exports = ForgotView;
/**
* Forgot password view
*
* @return {ForgotView} `ForgotView` instance.
* @api public
*/
function ForgotView() {
if (!(this instanceof ForgotView)) {
return new ForgotView();
};
FormView.call(this, template);
}
/**
* Extend from `FormView`
*/
FormView(ForgotView);
ForgotView.prototype.switchOn = function() {
this.on('success', this.bound('onsuccess'));
this.on('error', this.bound('onerror'));
};
/**
* Show success message
*/
ForgotView.prototype.onsuccess = function() {
var form = this.find('form');
var explanation = this.find('p.explanation-message');
var success = this.find('p.success-message');
form.addClass('hide');
explanation.addClass('hide');
success.removeClass('hide');
}
/**
* Handle errors
*/
ForgotView.prototype.onerror = function(error) {
if ('notvalidated' === error.status) page('/signup/resend-validation-email');
};
n_added: 0 | n_removed: 5 | n_hunks: 1 | change_kind: del_only
udiff:
--- a/lib/forgot/forgot-view.js
+++ b/lib/forgot/forgot-view.js
@@ -41,7 +41,2 @@
-ForgotView.prototype.switchoff = function() {
- this.off('success', this.bound('onsuccess'));
- this.off('error', this.bound('onerror'));
-};
-
/**
udiff-h:
--- a/lib/forgot/forgot-view.js
+++ b/lib/forgot/forgot-view.js
@@ ... @@
-ForgotView.prototype.switchoff = function() {
- this.off('success', this.bound('onsuccess'));
- this.off('error', this.bound('onerror'));
-};
-
/**
udiff-l:
--- a/lib/forgot/forgot-view.js
+++ b/lib/forgot/forgot-view.js
@@ -41,7 +41,2 @@
CON
DEL ForgotView.prototype.switchoff = function() {
DEL this.off('success', this.bound('onsuccess'));
DEL this.off('error', this.bound('onerror'));
DEL };
DEL
CON /**
search-replace:
<<<<<<< SEARCH
};
ForgotView.prototype.switchoff = function() {
this.off('success', this.bound('onsuccess'));
this.off('error', this.bound('onerror'));
};
/**
* Show success message
=======
};
/**
* Show success message
>>>>>>> REPLACE
|
repo: elpassion/android-commons | commit: 2ca7bff437f0bf9b90d871689346d774df1a78e0
path: espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
lang: kotlin | license: apache-2.0
message: Add does not have ChildWithTextAssertionsTests
old_code:
package com.elpassion.android.commons.espresso
import android.os.Bundle
import android.support.test.rule.ActivityTestRule
import android.widget.Button
import android.widget.FrameLayout
import org.junit.Rule
import org.junit.Test
class HasChildWithTextAssertionsTest {
@JvmField @Rule
val activityRule = ActivityTestRule(Activity::class.java)
@Test
fun shouldConfirmHasChildWithText() {
onId(anId).hasChildWithText(firstText)
}
@Test
fun shouldConfirmHasChildWithTextRes() {
onId(anId).hasChildWithText(secondTextRes)
}
class Activity : android.app.Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(FrameLayout(this).apply {
id = anId
addView(Button(this.context).apply {
text = firstText
})
addView(Button(this.context).apply {
setText(secondTextRes)
})
})
}
}
companion object {
private val anId = 124
private val firstText = "text"
private val secondTextRes = R.string.app_name
}
}
new_code:
package com.elpassion.android.commons.espresso
import android.os.Bundle
import android.support.test.rule.ActivityTestRule
import android.widget.Button
import android.widget.FrameLayout
import org.junit.Rule
import org.junit.Test
class HasChildWithTextAssertionsTest {
@JvmField @Rule
val activityRule = ActivityTestRule(Activity::class.java)
@Test
fun shouldConfirmHasChildWithText() {
onId(anId).hasChildWithText(firstText)
}
@Test
fun shouldConfirmHasChildWithTextRes() {
onId(anId).hasChildWithText(secondTextRes)
}
@Test
fun shouldConfirmDoesNotHaveChildWithText() {
onId(anId).doesNotHaveChildWithText("not existing text")
}
@Test
fun shouldConfirmDoesNotHaveChildWithTextRes() {
onId(anId).doesNotHaveChildWithText(123)
}
class Activity : android.app.Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(FrameLayout(this).apply {
id = anId
addView(Button(this.context).apply {
text = firstText
})
addView(Button(this.context).apply {
setText(secondTextRes)
})
})
}
}
companion object {
private val anId = 124
private val firstText = "text"
private val secondTextRes = R.string.app_name
}
}
| 10 | 0 | 1 |
add_only
|
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
+++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
@@ -24,2 +24,12 @@
+ @Test
+ fun shouldConfirmDoesNotHaveChildWithText() {
+ onId(anId).doesNotHaveChildWithText("not existing text")
+ }
+
+ @Test
+ fun shouldConfirmDoesNotHaveChildWithTextRes() {
+ onId(anId).doesNotHaveChildWithText(123)
+ }
+
class Activity : android.app.Activity() {
|
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
+++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
@@ ... @@
+ @Test
+ fun shouldConfirmDoesNotHaveChildWithText() {
+ onId(anId).doesNotHaveChildWithText("not existing text")
+ }
+
+ @Test
+ fun shouldConfirmDoesNotHaveChildWithTextRes() {
+ onId(anId).doesNotHaveChildWithText(123)
+ }
+
class Activity : android.app.Activity() {
|
--- a/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
+++ b/espresso/src/androidTest/java/com/elpassion/android/commons/espresso/HasChildWithTextAssertionsTest.kt
@@ -24,2 +24,12 @@
CON
ADD @Test
ADD fun shouldConfirmDoesNotHaveChildWithText() {
ADD onId(anId).doesNotHaveChildWithText("not existing text")
ADD }
ADD
ADD @Test
ADD fun shouldConfirmDoesNotHaveChildWithTextRes() {
ADD onId(anId).doesNotHaveChildWithText(123)
ADD }
ADD
CON class Activity : android.app.Activity() {
|
<<<<<<< SEARCH
}
class Activity : android.app.Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
=======
}
@Test
fun shouldConfirmDoesNotHaveChildWithText() {
onId(anId).doesNotHaveChildWithText("not existing text")
}
@Test
fun shouldConfirmDoesNotHaveChildWithTextRes() {
onId(anId).doesNotHaveChildWithText(123)
}
class Activity : android.app.Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
>>>>>>> REPLACE
|
vespa-engine/vespa
|
9d46e55257521413d7171958d674a800332840e6
|
config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
|
java
|
apache-2.0
|
Add method to get SystemName.Environment.RegionName
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;
import com.yahoo.config.provision.CloudName;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;
/**
* @author hakonhall
*/
public interface ZoneApi {
SystemName getSystemName();
ZoneId getId();
/**
* Returns the virtual ID of this zone. For ordinary zones this is the same as {@link ZoneApi#getId()}, for a
* system represented as a zone this is a fixed ID that is independent of the actual zone ID.
*/
default ZoneId getVirtualId() {
return getId();
}
default Environment getEnvironment() { return getId().environment(); }
default RegionName getRegionName() { return getId().region(); }
CloudName getCloudName();
/** Returns the region name within the cloud, e.g. 'us-east-1' in AWS */
String getCloudNativeRegionName();
}
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.provision.zone;
import com.yahoo.config.provision.CloudName;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;
/**
* @author hakonhall
*/
public interface ZoneApi {
SystemName getSystemName();
ZoneId getId();
/** Returns the SYSTEM.ENVIRONMENT.REGION string. */
default String getFullName() {
return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
}
/**
* Returns the virtual ID of this zone. For ordinary zones this is the same as {@link ZoneApi#getId()}, for a
* system represented as a zone this is a fixed ID that is independent of the actual zone ID.
*/
default ZoneId getVirtualId() {
return getId();
}
default Environment getEnvironment() { return getId().environment(); }
default RegionName getRegionName() { return getId().region(); }
CloudName getCloudName();
/** Returns the region name within the cloud, e.g. 'us-east-1' in AWS */
String getCloudNativeRegionName();
}
| 5 | 0 | 1 |
add_only
|
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
+++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
@@ -16,2 +16,7 @@
ZoneId getId();
+
+ /** Returns the SYSTEM.ENVIRONMENT.REGION string. */
+ default String getFullName() {
+ return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
+ }
|
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
+++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
@@ ... @@
ZoneId getId();
+
+ /** Returns the SYSTEM.ENVIRONMENT.REGION string. */
+ default String getFullName() {
+ return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
+ }
|
--- a/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
+++ b/config-provisioning/src/main/java/com/yahoo/config/provision/zone/ZoneApi.java
@@ -16,2 +16,7 @@
CON ZoneId getId();
ADD
ADD /** Returns the SYSTEM.ENVIRONMENT.REGION string. */
ADD default String getFullName() {
ADD return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
ADD }
CON
|
<<<<<<< SEARCH
ZoneId getId();
/**
=======
ZoneId getId();
/** Returns the SYSTEM.ENVIRONMENT.REGION string. */
default String getFullName() {
return getSystemName().value() + "." + getEnvironment().value() + "." + getRegionName().value();
}
/**
>>>>>>> REPLACE
|
jwilm/strava-rs
|
e59ffa425f1614ce226c66b4d6c341f3126a7062
|
src/api.rs
|
rust
|
mpl-2.0
|
Add paging methods to Paginated
|
use accesstoken::AccessToken;
#[doc(hidden)]
pub fn v3(token: &AccessToken, url: String) -> String {
format!("https://www.strava.com/api/v3/{}?access_token={}", url, token.get())
}
#[derive(Debug)]
pub struct Paginated<T> {
page: i32,
per_page: i32,
url: String,
data: Vec<T>
}
impl<T> Paginated<T> {
pub fn new(url: String, data: Vec<T>) -> Paginated<T> {
Paginated {
page: 1,
per_page: 30,
url: url,
data: data,
}
}
}
|
use accesstoken::AccessToken;
#[doc(hidden)]
pub fn v3(token: &AccessToken, url: String) -> String {
format!("https://www.strava.com/api/v3/{}?access_token={}", url, token.get())
}
/// Wrapper for endpoints that paginate
///
/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
/// fetching the next page and checking if more pages are available.
#[derive(Debug)]
pub struct Paginated<T> {
page: usize,
per_page: usize,
url: String,
data: Vec<T>
}
impl<T> Paginated<T> {
pub fn new(url: String, data: Vec<T>) -> Paginated<T> {
Paginated {
page: 1,
per_page: 30,
url: url,
data: data,
}
}
/// Get the next page of results
pub fn next(&self) -> Option<Paginated<T>> {
unimplemented!();
}
/// Check if this is the last page
pub fn last_page(&self) -> bool {
self.per_page != self.data.len()
}
}
#[cfg(test)]
mod paginated_tests {
use super::Paginated;
#[test]
fn last_page() {
let vec = (0..30).collect::<Vec<u8>>();
let pager = Paginated::new("test".to_string(), vec);
println!("{:?}", pager);
assert_eq!(pager.last_page(), false);
}
}
| 29 | 2 | 2 |
mixed
|
--- a/src/api.rs
+++ b/src/api.rs
@@ -7,6 +7,10 @@
+/// Wrapper for endpoints that paginate
+///
+/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
+/// fetching the next page and checking if more pages are available.
#[derive(Debug)]
pub struct Paginated<T> {
- page: i32,
- per_page: i32,
+ page: usize,
+ per_page: usize,
url: String,
@@ -24,2 +28,25 @@
}
+
+ /// Get the next page of results
+ pub fn next(&self) -> Option<Paginated<T>> {
+ unimplemented!();
+ }
+
+ /// Check if this is the last page
+ pub fn last_page(&self) -> bool {
+ self.per_page != self.data.len()
+ }
}
+
+#[cfg(test)]
+mod paginated_tests {
+ use super::Paginated;
+
+ #[test]
+ fn last_page() {
+ let vec = (0..30).collect::<Vec<u8>>();
+ let pager = Paginated::new("test".to_string(), vec);
+ println!("{:?}", pager);
+ assert_eq!(pager.last_page(), false);
+ }
+}
|
--- a/src/api.rs
+++ b/src/api.rs
@@ ... @@
+/// Wrapper for endpoints that paginate
+///
+/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
+/// fetching the next page and checking if more pages are available.
#[derive(Debug)]
pub struct Paginated<T> {
- page: i32,
- per_page: i32,
+ page: usize,
+ per_page: usize,
url: String,
@@ ... @@
}
+
+ /// Get the next page of results
+ pub fn next(&self) -> Option<Paginated<T>> {
+ unimplemented!();
+ }
+
+ /// Check if this is the last page
+ pub fn last_page(&self) -> bool {
+ self.per_page != self.data.len()
+ }
}
+
+#[cfg(test)]
+mod paginated_tests {
+ use super::Paginated;
+
+ #[test]
+ fn last_page() {
+ let vec = (0..30).collect::<Vec<u8>>();
+ let pager = Paginated::new("test".to_string(), vec);
+ println!("{:?}", pager);
+ assert_eq!(pager.last_page(), false);
+ }
+}
|
--- a/src/api.rs
+++ b/src/api.rs
@@ -7,6 +7,10 @@
CON
ADD /// Wrapper for endpoints that paginate
ADD ///
ADD /// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
ADD /// fetching the next page and checking if more pages are available.
CON #[derive(Debug)]
CON pub struct Paginated<T> {
DEL page: i32,
DEL per_page: i32,
ADD page: usize,
ADD per_page: usize,
CON url: String,
@@ -24,2 +28,25 @@
CON }
ADD
ADD /// Get the next page of results
ADD pub fn next(&self) -> Option<Paginated<T>> {
ADD unimplemented!();
ADD }
ADD
ADD /// Check if this is the last page
ADD pub fn last_page(&self) -> bool {
ADD self.per_page != self.data.len()
ADD }
CON }
ADD
ADD #[cfg(test)]
ADD mod paginated_tests {
ADD use super::Paginated;
ADD
ADD #[test]
ADD fn last_page() {
ADD let vec = (0..30).collect::<Vec<u8>>();
ADD let pager = Paginated::new("test".to_string(), vec);
ADD println!("{:?}", pager);
ADD assert_eq!(pager.last_page(), false);
ADD }
ADD }
|
<<<<<<< SEARCH
}
#[derive(Debug)]
pub struct Paginated<T> {
page: i32,
per_page: i32,
url: String,
data: Vec<T>
=======
}
/// Wrapper for endpoints that paginate
///
/// A Paginated<T> will be returned from any endpoint that supports paging. Provides methods for
/// fetching the next page and checking if more pages are available.
#[derive(Debug)]
pub struct Paginated<T> {
page: usize,
per_page: usize,
url: String,
data: Vec<T>
>>>>>>> REPLACE
|
AcapellaSoft/Aconite
|
68efdf458c0985c48ff8a1d9f3d38bed007f5632
|
aconite-core/src/io/aconite/utils/Async.kt
|
kotlin
|
mit
|
Use of COROUTINE_SUSPENDED from Kotlin internals
|
package io.aconite.utils
import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.Continuation
import kotlin.coroutines.experimental.CoroutineContext
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction
/**
* This object can be used as the return value of the async function to indicate
* that function was suspended. This works only with function [asyncCall].
*/
val COROUTINE_SUSPENDED = Any()
private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
override val context: CoroutineContext
get() = c.context
override fun resume(value: R) {
if (value === COROUTINE_SUSPENDED) return
c.resume(value)
}
override fun resumeWithException(exception: Throwable) {
if (exception === COROUTINE_SUSPENDED) return
c.resumeWithException(exception)
}
}
/**
* Extension for calling asynchronous functions by reflection.
* @receiver the called function
* @param[args] arguments of the called function
* @return result of the called function
*/
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
val cc = MyContinuation(c)
try {
val r = call(*args, cc)
cc.resume(r)
} catch (ex: InvocationTargetException) {
throw ex.cause ?: ex
}
}
|
package io.aconite.utils
import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction
/**
* This object can be used as the return value of the async function to indicate
* that function was suspended.
* TODO: find better way to use suspend with reflection
*/
val COROUTINE_SUSPENDED: Any = {
val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
field.isAccessible = true
field.get(null)
}()
/**
* Extension for calling asynchronous functions by reflection.
* @receiver the called function
* @param[args] arguments of the called function
* @return result of the called function
*/
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
try {
val r = call(*args, c)
if (r !== COROUTINE_SUSPENDED) c.resume(r)
} catch (ex: InvocationTargetException) {
throw ex.cause ?: ex
}
}
| 10 | 22 | 3 |
mixed
|
--- a/aconite-core/src/io/aconite/utils/Async.kt
+++ b/aconite-core/src/io/aconite/utils/Async.kt
@@ -3,4 +3,2 @@
import java.lang.reflect.InvocationTargetException
-import kotlin.coroutines.experimental.Continuation
-import kotlin.coroutines.experimental.CoroutineContext
import kotlin.coroutines.experimental.suspendCoroutine
@@ -10,20 +8,11 @@
* This object can be used as the return value of the async function to indicate
- * that function was suspended. This works only with function [asyncCall].
+ * that function was suspended.
+ * TODO: find better way to use suspend with reflection
*/
-val COROUTINE_SUSPENDED = Any()
-
-private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
- override val context: CoroutineContext
- get() = c.context
-
- override fun resume(value: R) {
- if (value === COROUTINE_SUSPENDED) return
- c.resume(value)
- }
-
- override fun resumeWithException(exception: Throwable) {
- if (exception === COROUTINE_SUSPENDED) return
- c.resumeWithException(exception)
- }
-}
+val COROUTINE_SUSPENDED: Any = {
+ val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
+ val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
+ field.isAccessible = true
+ field.get(null)
+}()
@@ -36,6 +25,5 @@
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
- val cc = MyContinuation(c)
try {
- val r = call(*args, cc)
- cc.resume(r)
+ val r = call(*args, c)
+ if (r !== COROUTINE_SUSPENDED) c.resume(r)
} catch (ex: InvocationTargetException) {
|
--- a/aconite-core/src/io/aconite/utils/Async.kt
+++ b/aconite-core/src/io/aconite/utils/Async.kt
@@ ... @@
import java.lang.reflect.InvocationTargetException
-import kotlin.coroutines.experimental.Continuation
-import kotlin.coroutines.experimental.CoroutineContext
import kotlin.coroutines.experimental.suspendCoroutine
@@ ... @@
* This object can be used as the return value of the async function to indicate
- * that function was suspended. This works only with function [asyncCall].
+ * that function was suspended.
+ * TODO: find better way to use suspend with reflection
*/
-val COROUTINE_SUSPENDED = Any()
-
-private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
- override val context: CoroutineContext
- get() = c.context
-
- override fun resume(value: R) {
- if (value === COROUTINE_SUSPENDED) return
- c.resume(value)
- }
-
- override fun resumeWithException(exception: Throwable) {
- if (exception === COROUTINE_SUSPENDED) return
- c.resumeWithException(exception)
- }
-}
+val COROUTINE_SUSPENDED: Any = {
+ val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
+ val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
+ field.isAccessible = true
+ field.get(null)
+}()
@@ ... @@
suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
- val cc = MyContinuation(c)
try {
- val r = call(*args, cc)
- cc.resume(r)
+ val r = call(*args, c)
+ if (r !== COROUTINE_SUSPENDED) c.resume(r)
} catch (ex: InvocationTargetException) {
|
--- a/aconite-core/src/io/aconite/utils/Async.kt
+++ b/aconite-core/src/io/aconite/utils/Async.kt
@@ -3,4 +3,2 @@
CON import java.lang.reflect.InvocationTargetException
DEL import kotlin.coroutines.experimental.Continuation
DEL import kotlin.coroutines.experimental.CoroutineContext
CON import kotlin.coroutines.experimental.suspendCoroutine
@@ -10,20 +8,11 @@
CON * This object can be used as the return value of the async function to indicate
DEL * that function was suspended. This works only with function [asyncCall].
ADD * that function was suspended.
ADD * TODO: find better way to use suspend with reflection
CON */
DEL val COROUTINE_SUSPENDED = Any()
DEL
DEL private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
DEL override val context: CoroutineContext
DEL get() = c.context
DEL
DEL override fun resume(value: R) {
DEL if (value === COROUTINE_SUSPENDED) return
DEL c.resume(value)
DEL }
DEL
DEL override fun resumeWithException(exception: Throwable) {
DEL if (exception === COROUTINE_SUSPENDED) return
DEL c.resumeWithException(exception)
DEL }
DEL }
ADD val COROUTINE_SUSPENDED: Any = {
ADD val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
ADD val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
ADD field.isAccessible = true
ADD field.get(null)
ADD }()
CON
@@ -36,6 +25,5 @@
CON suspend fun <R> KFunction<R>.asyncCall(vararg args: Any?) = suspendCoroutine<R> { c ->
DEL val cc = MyContinuation(c)
CON try {
DEL val r = call(*args, cc)
DEL cc.resume(r)
ADD val r = call(*args, c)
ADD if (r !== COROUTINE_SUSPENDED) c.resume(r)
CON } catch (ex: InvocationTargetException) {
|
<<<<<<< SEARCH
import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.Continuation
import kotlin.coroutines.experimental.CoroutineContext
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction
/**
* This object can be used as the return value of the async function to indicate
* that function was suspended. This works only with function [asyncCall].
*/
val COROUTINE_SUSPENDED = Any()
private class MyContinuation<in R>(val c: Continuation<R>): Continuation<R> {
override val context: CoroutineContext
get() = c.context
override fun resume(value: R) {
if (value === COROUTINE_SUSPENDED) return
c.resume(value)
}
override fun resumeWithException(exception: Throwable) {
if (exception === COROUTINE_SUSPENDED) return
c.resumeWithException(exception)
}
}
/**
=======
import java.lang.reflect.InvocationTargetException
import kotlin.coroutines.experimental.suspendCoroutine
import kotlin.reflect.KFunction
/**
* This object can be used as the return value of the async function to indicate
* that function was suspended.
* TODO: find better way to use suspend with reflection
*/
val COROUTINE_SUSPENDED: Any = {
val cls = Class.forName("kotlin.coroutines.experimental.intrinsics.IntrinsicsKt")
val field = cls.getDeclaredField("COROUTINE_SUSPENDED")
field.isAccessible = true
field.get(null)
}()
/**
>>>>>>> REPLACE
|
Reinaesaya/OUIRL-ChatBot
|
396ab20874a0c3492482a8ae03fd7d61980917a5
|
chatterbot/adapters/logic/closest_match.py
|
python
|
bsd-3-clause
|
Update closest match adapter docstring.
|
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter creates a response by
using fuzzywuzzy's process class to extract the most similar
response to the input. This adapter selects a response to an
input statement by selecting the closest known matching
statement based on the Levenshtein Distance between the text
of each statement.
"""
def get(self, input_statement):
"""
Takes a statement string and a list of statement strings.
Returns the closest matching statement from the list.
"""
statement_list = self.context.storage.get_response_statements()
if not statement_list:
if self.has_storage_context:
# Use a randomly picked statement
self.logger.info(
u'No statements have known responses. ' +
u'Choosing a random response to return.'
)
return 0, self.context.storage.get_random()
else:
raise self.EmptyDatasetException()
confidence = -1
closest_match = input_statement
# Find the closest matching known statement
for statement in statement_list:
ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())
if ratio > confidence:
confidence = ratio
closest_match = statement
# Convert the confidence integer to a percent
confidence /= 100.0
return confidence, closest_match
|
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter selects a known response
to an input by searching for a known statement that most closely
matches the input based on the Levenshtein Distance between the text
of each statement.
"""
def get(self, input_statement):
"""
Takes a statement string and a list of statement strings.
Returns the closest matching statement from the list.
"""
statement_list = self.context.storage.get_response_statements()
if not statement_list:
if self.has_storage_context:
# Use a randomly picked statement
self.logger.info(
u'No statements have known responses. ' +
u'Choosing a random response to return.'
)
return 0, self.context.storage.get_random()
else:
raise self.EmptyDatasetException()
confidence = -1
closest_match = input_statement
# Find the closest matching known statement
for statement in statement_list:
ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())
if ratio > confidence:
confidence = ratio
closest_match = statement
# Convert the confidence integer to a percent
confidence /= 100.0
return confidence, closest_match
| 3 | 6 | 2 |
mixed
|
--- a/chatterbot/adapters/logic/closest_match.py
+++ b/chatterbot/adapters/logic/closest_match.py
@@ -2,3 +2,2 @@
from fuzzywuzzy import fuzz
-
from .base_match import BaseMatchAdapter
@@ -8,7 +7,5 @@
"""
- The ClosestMatchAdapter logic adapter creates a response by
- using fuzzywuzzy's process class to extract the most similar
- response to the input. This adapter selects a response to an
- input statement by selecting the closest known matching
- statement based on the Levenshtein Distance between the text
+ The ClosestMatchAdapter logic adapter selects a known response
+ to an input by searching for a known statement that most closely
+ matches the input based on the Levenshtein Distance between the text
of each statement.
|
--- a/chatterbot/adapters/logic/closest_match.py
+++ b/chatterbot/adapters/logic/closest_match.py
@@ ... @@
from fuzzywuzzy import fuzz
-
from .base_match import BaseMatchAdapter
@@ ... @@
"""
- The ClosestMatchAdapter logic adapter creates a response by
- using fuzzywuzzy's process class to extract the most similar
- response to the input. This adapter selects a response to an
- input statement by selecting the closest known matching
- statement based on the Levenshtein Distance between the text
+ The ClosestMatchAdapter logic adapter selects a known response
+ to an input by searching for a known statement that most closely
+ matches the input based on the Levenshtein Distance between the text
of each statement.
|
--- a/chatterbot/adapters/logic/closest_match.py
+++ b/chatterbot/adapters/logic/closest_match.py
@@ -2,3 +2,2 @@
CON from fuzzywuzzy import fuzz
DEL
CON from .base_match import BaseMatchAdapter
@@ -8,7 +7,5 @@
CON """
DEL The ClosestMatchAdapter logic adapter creates a response by
DEL using fuzzywuzzy's process class to extract the most similar
DEL response to the input. This adapter selects a response to an
DEL input statement by selecting the closest known matching
DEL statement based on the Levenshtein Distance between the text
ADD The ClosestMatchAdapter logic adapter selects a known response
ADD to an input by searching for a known statement that most closely
ADD matches the input based on the Levenshtein Distance between the text
CON of each statement.
|
<<<<<<< SEARCH
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter creates a response by
using fuzzywuzzy's process class to extract the most similar
response to the input. This adapter selects a response to an
input statement by selecting the closest known matching
statement based on the Levenshtein Distance between the text
of each statement.
"""
=======
# -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter selects a known response
to an input by searching for a known statement that most closely
matches the input based on the Levenshtein Distance between the text
of each statement.
"""
>>>>>>> REPLACE
|
mchung94/latest-versions
|
b0814b95ea854f7b3f0b9db48ae9beee078c2a30
|
versions/software/openjdk.py
|
python
|
mit
|
Update OpenJDK version to support both 8 and 9.
|
import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
|
import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
| 12 | 15 | 2 |
mixed
|
--- a/versions/software/openjdk.py
+++ b/versions/software/openjdk.py
@@ -15,2 +15,3 @@
version_string = get_command_stderr(('java', '-version'))
+ # "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
@@ -20,21 +21,17 @@
-def downloadable_version(url):
- """Strip the version out of the Zulu OpenJDK manual download link."""
- # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
- filename = url[url.rfind('/') + 1:]
- jdk_version = get_text_between(filename, '-jdk', '-')
- version, update = jdk_version.rsplit('.', 1)
- return f'1.{version}_{update}'
-
-
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
+ installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
- div = soup.find('div', class_='latest_area')
- if div:
- zip_filename = re.compile('\.zip$')
- tag = div.find('a', class_='r-download', href=zip_filename)
- if tag:
- return downloadable_version(tag.attrs['href'])
+ zip_filename = re.compile('\.zip$')
+ for tag in soup.find_all('a', class_='r-download', href=zip_filename):
+ filename = tag.attrs['href']
+ zulu = get_text_between(filename, 'bin/zulu', '-')
+ jdk = get_text_between(filename, 'jdk', '-')
+ if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
+ return zulu
+ elif installed[0] == '1' and jdk[0] == installed[2]:
+ version, update = jdk.rsplit('.', 1)
+ return f'1.{version}_{update}'
return 'Unknown'
|
--- a/versions/software/openjdk.py
+++ b/versions/software/openjdk.py
@@ ... @@
version_string = get_command_stderr(('java', '-version'))
+ # "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
@@ ... @@
-def downloadable_version(url):
- """Strip the version out of the Zulu OpenJDK manual download link."""
- # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
- filename = url[url.rfind('/') + 1:]
- jdk_version = get_text_between(filename, '-jdk', '-')
- version, update = jdk_version.rsplit('.', 1)
- return f'1.{version}_{update}'
-
-
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
+ installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
- div = soup.find('div', class_='latest_area')
- if div:
- zip_filename = re.compile('\.zip$')
- tag = div.find('a', class_='r-download', href=zip_filename)
- if tag:
- return downloadable_version(tag.attrs['href'])
+ zip_filename = re.compile('\.zip$')
+ for tag in soup.find_all('a', class_='r-download', href=zip_filename):
+ filename = tag.attrs['href']
+ zulu = get_text_between(filename, 'bin/zulu', '-')
+ jdk = get_text_between(filename, 'jdk', '-')
+ if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
+ return zulu
+ elif installed[0] == '1' and jdk[0] == installed[2]:
+ version, update = jdk.rsplit('.', 1)
+ return f'1.{version}_{update}'
return 'Unknown'
|
--- a/versions/software/openjdk.py
+++ b/versions/software/openjdk.py
@@ -15,2 +15,3 @@
CON version_string = get_command_stderr(('java', '-version'))
ADD # "1.8.0_162" or "9.0.4.1" for example
CON return get_text_between(version_string, '"', '"')
@@ -20,21 +21,17 @@
CON
DEL def downloadable_version(url):
DEL """Strip the version out of the Zulu OpenJDK manual download link."""
DEL # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
DEL filename = url[url.rfind('/') + 1:]
DEL jdk_version = get_text_between(filename, '-jdk', '-')
DEL version, update = jdk_version.rsplit('.', 1)
DEL return f'1.{version}_{update}'
DEL
DEL
CON def latest_version():
CON """Return the latest version of Zulu OpenJDK available for download."""
ADD installed = installed_version()
CON soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
CON if soup:
DEL div = soup.find('div', class_='latest_area')
DEL if div:
DEL zip_filename = re.compile('\.zip$')
DEL tag = div.find('a', class_='r-download', href=zip_filename)
DEL if tag:
DEL return downloadable_version(tag.attrs['href'])
ADD zip_filename = re.compile('\.zip$')
ADD for tag in soup.find_all('a', class_='r-download', href=zip_filename):
ADD filename = tag.attrs['href']
ADD zulu = get_text_between(filename, 'bin/zulu', '-')
ADD jdk = get_text_between(filename, 'jdk', '-')
ADD if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
ADD return zulu
ADD elif installed[0] == '1' and jdk[0] == installed[2]:
ADD version, update = jdk.rsplit('.', 1)
ADD return f'1.{version}_{update}'
CON return 'Unknown'
|
<<<<<<< SEARCH
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
=======
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
>>>>>>> REPLACE
|
google/evergreen-checker
|
6b15019a023f26228cf0baeb0e4b1a052987e6ab
|
build.gradle.kts
|
kotlin
|
apache-2.0
|
Update `kotlin_version` from `1.4.21` to `1.5.0`
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
extra["kotlin_version"] = "1.4.21"
repositories {
google()
jcenter()
}
dependencies {
classpath("com.android.tools.build:gradle:4.1.2")
classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?))
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
tasks.withType<KotlinCompile> {
kotlinOptions {
jvmTarget = "1.8"
}
}
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
extra["kotlin_version"] = "1.5.0"
repositories {
google()
jcenter()
}
dependencies {
classpath("com.android.tools.build:gradle:4.1.2")
classpath(kotlin("gradle-plugin", version = rootProject.extra["kotlin_version"] as String?))
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
tasks.withType<KotlinCompile> {
kotlinOptions {
jvmTarget = "1.8"
}
}
| 1 | 1 | 1 |
mixed
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -19,3 +19,3 @@
buildscript {
- extra["kotlin_version"] = "1.4.21"
+ extra["kotlin_version"] = "1.5.0"
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ ... @@
buildscript {
- extra["kotlin_version"] = "1.4.21"
+ extra["kotlin_version"] = "1.5.0"
|
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -19,3 +19,3 @@
CON buildscript {
DEL extra["kotlin_version"] = "1.4.21"
ADD extra["kotlin_version"] = "1.5.0"
CON
|
<<<<<<< SEARCH
buildscript {
extra["kotlin_version"] = "1.4.21"
repositories {
=======
buildscript {
extra["kotlin_version"] = "1.5.0"
repositories {
>>>>>>> REPLACE
|
dtolnay/syn
|
d9e61a5ebe2d3bc0b1077f304a31bf377d9c83d0
|
tests/test_ty.rs
|
rust
|
apache-2.0
|
Add test for Type containing macro metavariable
|
use syn::Type;
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
|
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[test]
fn test_macro_variable_type() {
// mimics the token stream corresponding to `$ty<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
}),
],
},
},
],
},
}
"###);
}
| 43 | 0 | 2 |
add_only
|
--- a/tests/test_ty.rs
+++ b/tests/test_ty.rs
@@ -1 +1,7 @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use std::iter::FromIterator;
use syn::Type;
@@ -10 +16,38 @@
}
+
+#[test]
+fn test_macro_variable_type() {
+ // mimics the token stream corresponding to `$ty<T>`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("T", Span::call_site())),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "ty",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ arguments: None,
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+}
|
--- a/tests/test_ty.rs
+++ b/tests/test_ty.rs
@@ ... @@
+#[macro_use]
+mod macros;
+
+use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use quote::quote;
+use std::iter::FromIterator;
use syn::Type;
@@ ... @@
}
+
+#[test]
+fn test_macro_variable_type() {
+ // mimics the token stream corresponding to `$ty<T>`
+ let tokens = TokenStream::from_iter(vec![
+ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
+ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
+ TokenTree::Ident(Ident::new("T", Span::call_site())),
+ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
+ ]);
+
+ snapshot!(tokens as Type, @r###"
+ Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "ty",
+ arguments: PathArguments::AngleBracketed {
+ args: [
+ Type(Type::Path {
+ path: Path {
+ segments: [
+ PathSegment {
+ ident: "T",
+ arguments: None,
+ },
+ ],
+ },
+ }),
+ ],
+ },
+ },
+ ],
+ },
+ }
+ "###);
+}
|
--- a/tests/test_ty.rs
+++ b/tests/test_ty.rs
@@ -1 +1,7 @@
ADD #[macro_use]
ADD mod macros;
ADD
ADD use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
ADD use quote::quote;
ADD use std::iter::FromIterator;
CON use syn::Type;
@@ -10 +16,38 @@
CON }
ADD
ADD #[test]
ADD fn test_macro_variable_type() {
ADD // mimics the token stream corresponding to `$ty<T>`
ADD let tokens = TokenStream::from_iter(vec![
ADD TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
ADD TokenTree::Punct(Punct::new('<', Spacing::Alone)),
ADD TokenTree::Ident(Ident::new("T", Span::call_site())),
ADD TokenTree::Punct(Punct::new('>', Spacing::Alone)),
ADD ]);
ADD
ADD snapshot!(tokens as Type, @r###"
ADD Type::Path {
ADD path: Path {
ADD segments: [
ADD PathSegment {
ADD ident: "ty",
ADD arguments: PathArguments::AngleBracketed {
ADD args: [
ADD Type(Type::Path {
ADD path: Path {
ADD segments: [
ADD PathSegment {
ADD ident: "T",
ADD arguments: None,
ADD },
ADD ],
ADD },
ADD }),
ADD ],
ADD },
ADD },
ADD ],
ADD },
ADD }
ADD "###);
ADD }
|
<<<<<<< SEARCH
use syn::Type;
=======
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;
>>>>>>> REPLACE
|
kvakil/venus
|
a70c35802d37fff0efdf24235963269731ca93d9
|
src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
|
kotlin
|
mit
|
Add terminate with exit code
|
package venus.simulator.impls
import venus.riscv.Instruction
import venus.simulator.Simulator
import venus.simulator.InstructionImplementation
import venus.glue.Renderer
object ECALLImpl : InstructionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
val which = sim.getReg(10)
when (which) {
1 -> { // print integer
val arg = sim.getReg(11)
Renderer.printConsole(arg)
}
4 -> { // print string
var arg = sim.getReg(11)
var c = sim.loadByte(arg)
arg++
while (c != 0) {
Renderer.printConsole(c.toChar())
c = sim.loadByte(arg)
arg++
}
}
9 -> { // malloc
var bytes = sim.getReg(11)
if (bytes < 0) return
sim.setReg(10, sim.getHeapEnd())
sim.addHeapSpace(bytes)
}
10 -> { // exit
sim.setPC(Int.MAX_VALUE)
return
}
11 -> { // print char
var arg = sim.getReg(11)
Renderer.printConsole(arg.toChar())
}
else -> {
Renderer.printConsole("Invalid ecall ${which}")
}
}
sim.incrementPC(inst.length)
}
}
|
package venus.simulator.impls
import venus.riscv.Instruction
import venus.simulator.Simulator
import venus.simulator.InstructionImplementation
import venus.glue.Renderer
object ECALLImpl : structionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
val which = sim.getReg(10)
when (which) {
1 -> { // print integer
val arg = sim.getReg(11)
Renderer.printConsole(arg)
}
4 -> { // print string
var arg = sim.getReg(11)
var c = sim.loadByte(arg)
arg++
while (c != 0) {
Renderer.printConsole(c.toChar())
c = sim.loadByte(arg)
arg++
}
}
9 -> { // malloc
var bytes = sim.getReg(11)
if (bytes < 0) return
sim.setReg(10, sim.getHeapEnd())
sim.addHeapSpace(bytes)
}
10 -> { // exit
sim.setPC(Int.MAX_VALUE)
return
}
11 -> { // print char
var arg = sim.getReg(11)
Renderer.printConsole(arg.toChar())
}
17 -> { // terminate with error code
sim.setPC(Int.MAX_VALUE)
val retVal = sim.getReg(11)
Renderer.printConsole("Exited with error code $retVal\n")
}
else -> {
Renderer.printConsole("Invalid ecall ${which}")
}
}
sim.incrementPC(inst.length)
}
}
| 6 | 1 | 2 |
mixed
|
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
+++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
@@ -7,3 +7,3 @@
-object ECALLImpl : InstructionImplementation {
+object ECALLImpl : structionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
@@ -39,2 +39,7 @@
}
+ 17 -> { // terminate with error code
+ sim.setPC(Int.MAX_VALUE)
+ val retVal = sim.getReg(11)
+ Renderer.printConsole("Exited with error code $retVal\n")
+ }
else -> {
|
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
+++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
@@ ... @@
-object ECALLImpl : InstructionImplementation {
+object ECALLImpl : structionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
@@ ... @@
}
+ 17 -> { // terminate with error code
+ sim.setPC(Int.MAX_VALUE)
+ val retVal = sim.getReg(11)
+ Renderer.printConsole("Exited with error code $retVal\n")
+ }
else -> {
|
--- a/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
+++ b/src/main/kotlin/venus/simulator/impls/ECALLImpl.kt
@@ -7,3 +7,3 @@
CON
DEL object ECALLImpl : InstructionImplementation {
ADD object ECALLImpl : structionImplementation {
CON override operator fun invoke(inst: Instruction, sim: Simulator) {
@@ -39,2 +39,7 @@
CON }
ADD 17 -> { // terminate with error code
ADD sim.setPC(Int.MAX_VALUE)
ADD val retVal = sim.getReg(11)
ADD Renderer.printConsole("Exited with error code $retVal\n")
ADD }
CON else -> {
|
<<<<<<< SEARCH
import venus.glue.Renderer
object ECALLImpl : InstructionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
val which = sim.getReg(10)
=======
import venus.glue.Renderer
object ECALLImpl : structionImplementation {
override operator fun invoke(inst: Instruction, sim: Simulator) {
val which = sim.getReg(10)
>>>>>>> REPLACE
|
carnesen/mathjs-app
|
b710962f59a12613deb5e9197bd166dad19161b1
|
webpack.config.babel.js
|
javascript
|
mit
|
Purge style loader from webpack config
|
import path from 'path'
import webpack from 'webpack'
const { NODE_ENV } = process.env
const production = NODE_ENV === 'production'
const plugins = [
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(NODE_ENV)
})
]
let extension = '.js'
if (production) {
plugins.push(new webpack.optimize.UglifyJsPlugin())
extension = '.min.js'
}
module.exports = [
{
entry: [
'babel-polyfill',
'whatwg-fetch',
path.join(__dirname, 'src', 'browser', 'index.js')
],
output: {
path: path.join(__dirname, 'dist'),
filename: `bundle${extension}`
},
plugins,
module: {
loaders: [
{
test: /.js$/,
loader: 'babel-loader',
exclude: /node_modules/
},
{ test: /\.json$/, loader: 'json-loader' },
{ test: /\.css$/, loader: 'style-loader!css-loader' }
]
}
}
]
|
import path from 'path'
import webpack from 'webpack'
const { NODE_ENV } = process.env
const production = NODE_ENV === 'production'
const plugins = [
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(NODE_ENV)
})
]
let extension = '.js'
if (production) {
plugins.push(new webpack.optimize.UglifyJsPlugin())
extension = '.min.js'
}
module.exports = [
{
entry: [
'babel-polyfill',
'whatwg-fetch',
path.join(__dirname, 'src', 'browser', 'index.js')
],
output: {
path: path.join(__dirname, 'dist'),
filename: `bundle${extension}`
},
plugins,
module: {
loaders: [
{
test: /.js$/,
loader: 'babel-loader',
exclude: /node_modules/
},
{ test: /\.json$/, loader: 'json-loader' }
]
}
}
]
| 1 | 2 | 1 |
mixed
|
--- a/webpack.config.babel.js
+++ b/webpack.config.babel.js
@@ -37,4 +37,3 @@
},
- { test: /\.json$/, loader: 'json-loader' },
- { test: /\.css$/, loader: 'style-loader!css-loader' }
+ { test: /\.json$/, loader: 'json-loader' }
]
|
--- a/webpack.config.babel.js
+++ b/webpack.config.babel.js
@@ ... @@
},
- { test: /\.json$/, loader: 'json-loader' },
- { test: /\.css$/, loader: 'style-loader!css-loader' }
+ { test: /\.json$/, loader: 'json-loader' }
]
|
--- a/webpack.config.babel.js
+++ b/webpack.config.babel.js
@@ -37,4 +37,3 @@
CON },
DEL { test: /\.json$/, loader: 'json-loader' },
DEL { test: /\.css$/, loader: 'style-loader!css-loader' }
ADD { test: /\.json$/, loader: 'json-loader' }
CON ]
|
<<<<<<< SEARCH
exclude: /node_modules/
},
{ test: /\.json$/, loader: 'json-loader' },
{ test: /\.css$/, loader: 'style-loader!css-loader' }
]
}
=======
exclude: /node_modules/
},
{ test: /\.json$/, loader: 'json-loader' }
]
}
>>>>>>> REPLACE
|
rbartoli/react-boilerplate
|
ba9b586cd4567b739cbe94e51b47e1f933ae1242
|
webpack.config.js
|
javascript
|
mit
|
Change log level to info
|
var path = require('path');
var devConfig = {
context: path.join(__dirname, '/app'),
entry: [
'./app.js'
],
output: {
path: path.join(__dirname, '/build/'),
publicPath: '/public/assets/js/',
filename: 'app.js',
},
devtool: 'eval-source-map',
devServer: {
contentBase: 'public',
historyApiFallback: false,
stats: 'errors-only'
},
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loaders: ['react-hot', 'babel'],
},
{
test: /\.css$/,
exclude: /node_modules/,
loaders: ['style', 'css']
},
{
test: /\.scss$/,
exclude: /node_modules/,
loaders: ['style', 'css', 'sass']
},
{
test: /\.(jpg|png|ttf|eot|woff|woff2|svg)$/,
exclude: /node_modules/,
loader: 'url?limit=100000'
}
]
}
}
if (process.env.NODE_ENV === 'production') {
devConfig.devtool = '';
devConfig.devServer = {};
};
module.exports = devConfig;
|
var path = require('path');
var devConfig = {
context: path.join(__dirname, '/app'),
entry: [
'./app.js'
],
output: {
path: path.join(__dirname, '/build/'),
publicPath: '/public/assets/js/',
filename: 'app.js',
},
devtool: 'eval-source-map',
devServer: {
contentBase: 'public',
historyApiFallback: false
},
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loaders: ['react-hot', 'babel'],
},
{
test: /\.css$/,
exclude: /node_modules/,
loaders: ['style', 'css']
},
{
test: /\.scss$/,
exclude: /node_modules/,
loaders: ['style', 'css', 'sass']
},
{
test: /\.(jpg|png|ttf|eot|woff|woff2|svg)$/,
exclude: /node_modules/,
loader: 'url?limit=100000'
}
]
}
}
if (process.env.NODE_ENV === 'production') {
devConfig.devtool = '';
devConfig.devServer = {};
};
module.exports = devConfig;
| 1 | 3 | 1 |
mixed
|
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -15,5 +15,3 @@
contentBase: 'public',
- historyApiFallback: false,
-
- stats: 'errors-only'
+ historyApiFallback: false
},
|
--- a/webpack.config.js
+++ b/webpack.config.js
@@ ... @@
contentBase: 'public',
- historyApiFallback: false,
-
- stats: 'errors-only'
+ historyApiFallback: false
},
|
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -15,5 +15,3 @@
CON contentBase: 'public',
DEL historyApiFallback: false,
DEL
DEL stats: 'errors-only'
ADD historyApiFallback: false
CON },
|
<<<<<<< SEARCH
devServer: {
contentBase: 'public',
historyApiFallback: false,
stats: 'errors-only'
},
module: {
=======
devServer: {
contentBase: 'public',
historyApiFallback: false
},
module: {
>>>>>>> REPLACE
|
androidx/androidx
|
7439b9a7bdf87fca17db852766191cee05fceb37
|
lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
|
kotlin
|
apache-2.0
|
Fix broken Lint version check for lifecycle
Bug: 189211535
Test: ApiLintVersionsTest#versionsCheck
Change-Id: I6b87f10803b615ab6a4e305883b7a5d9f118b77f
|
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.lifecycle.lint
import com.android.tools.lint.client.api.LintClient
import com.android.tools.lint.detector.api.CURRENT_API
import org.junit.Assert
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(JUnit4::class)
class ApiLintVersionsTest {
@Test
fun versionsCheck() {
LintClient.clientName = LintClient.CLIENT_UNIT_TESTS
val registry = LifecycleRuntimeIssueRegistry()
// we hardcode version registry.api to the version that is used to run tests
Assert.assertEquals(CURRENT_API, registry.api)
// Intentionally fails in IDE, because we use different API version in
// studio and command line
Assert.assertEquals(3, registry.minApi)
}
}
|
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.lifecycle.lint
import com.android.tools.lint.client.api.LintClient
import com.android.tools.lint.detector.api.CURRENT_API
import org.junit.Assert
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(JUnit4::class)
class ApiLintVersionsTest {
@Test
fun versionsCheck() {
LintClient.clientName = LintClient.CLIENT_UNIT_TESTS
val registry = LifecycleRuntimeIssueRegistry()
// we hardcode version registry.api to the version that is used to run tests
Assert.assertEquals(CURRENT_API, registry.api)
// Intentionally fails in IDE, because we use different API version in
// studio and command line
Assert.assertEquals(8, registry.minApi)
}
}
| 1 | 1 | 1 |
mixed
|
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
+++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
@@ -37,3 +37,3 @@
// studio and command line
- Assert.assertEquals(3, registry.minApi)
+ Assert.assertEquals(8, registry.minApi)
}
|
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
+++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
@@ ... @@
// studio and command line
- Assert.assertEquals(3, registry.minApi)
+ Assert.assertEquals(8, registry.minApi)
}
|
--- a/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
+++ b/lifecycle/lifecycle-runtime-ktx-lint/src/test/java/androidx/lifecycle/lint/ApiLintVersionsTest.kt
@@ -37,3 +37,3 @@
CON // studio and command line
DEL Assert.assertEquals(3, registry.minApi)
ADD Assert.assertEquals(8, registry.minApi)
CON }
|
<<<<<<< SEARCH
// Intentionally fails in IDE, because we use different API version in
// studio and command line
Assert.assertEquals(3, registry.minApi)
}
}
=======
// Intentionally fails in IDE, because we use different API version in
// studio and command line
Assert.assertEquals(8, registry.minApi)
}
}
>>>>>>> REPLACE
|
dirvine/rust-utp
|
3a8fa9325a54a4ca4837cc63a577f4cf9c78056c
|
src/lib.rs
|
rust
|
apache-2.0
|
Add example to module documentation.
|
//! Implementation of the Micro Transport Protocol.[^spec]
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
// __________ ____ ____
// /_ __/ __ \/ __ \/ __ \
// / / / / / / / / / / / /
// / / / /_/ / /_/ / /_/ /
// /_/ \____/_____/\____/
//
// - Lossy UDP socket for testing purposes: send and receive ops are wrappers
// that stochastically drop or reorder packets.
// - Sending FIN on drop
// - Handle packet loss
// - Path MTU discovery (RFC4821)
#![deny(missing_docs)]
#![feature(core, collections, io)]
extern crate rand;
extern crate time;
#[macro_use] extern crate log;
// Public API
pub use socket::UtpSocket;
pub use stream::UtpStream;
mod util;
mod bit_iterator;
mod packet;
mod socket;
mod stream;
|
//! Implementation of the Micro Transport Protocol.[^spec]
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
//! # Examples
//!
//! ```
//! extern crate utp;
//!
//! use utp::UtpStream;
//! use std::io::{Read, Write};
//!
//! fn main() {
//! // Connect to an hypothetical local server running on port 8080
//! let addr = "127.0.0.1:8080";
//! let mut stream = match UtpStream::connect(addr) {
//! Ok(stream) => stream,
//! Err(e) => panic!("{}", e),
//! };
//!
//! // Send a string
//! match stream.write("Hi there!".as_bytes()) {
//! Ok(_) => (),
//! Err(e) => println!("Write failed with {}", e)
//! }
//!
//! // Close the stream
//! match stream.close() {
//! Ok(()) => println!("Connection closed"),
//! Err(e) => println!("{}", e)
//! }
//! }
//! ```
// __________ ____ ____
// /_ __/ __ \/ __ \/ __ \
// / / / / / / / / / / / /
// / / / /_/ / /_/ / /_/ /
// /_/ \____/_____/\____/
//
// - Lossy UDP socket for testing purposes: send and receive ops are wrappers
// that stochastically drop or reorder packets.
// - Sending FIN on drop
// - Handle packet loss
// - Path MTU discovery (RFC4821)
#![deny(missing_docs)]
#![feature(core, collections, io)]
extern crate rand;
extern crate time;
#[macro_use] extern crate log;
// Public API
pub use socket::UtpSocket;
pub use stream::UtpStream;
mod util;
mod bit_iterator;
mod packet;
mod socket;
mod stream;
| 30 | 0 | 1 |
add_only
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,2 +3,32 @@
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
+
+//! # Examples
+//!
+//! ```
+//! extern crate utp;
+//!
+//! use utp::UtpStream;
+//! use std::io::{Read, Write};
+//!
+//! fn main() {
+//! // Connect to an hypothetical local server running on port 8080
+//! let addr = "127.0.0.1:8080";
+//! let mut stream = match UtpStream::connect(addr) {
+//! Ok(stream) => stream,
+//! Err(e) => panic!("{}", e),
+//! };
+//!
+//! // Send a string
+//! match stream.write("Hi there!".as_bytes()) {
+//! Ok(_) => (),
+//! Err(e) => println!("Write failed with {}", e)
+//! }
+//!
+//! // Close the stream
+//! match stream.close() {
+//! Ok(()) => println!("Connection closed"),
+//! Err(e) => println!("{}", e)
+//! }
+//! }
+//! ```
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ ... @@
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
+
+//! # Examples
+//!
+//! ```
+//! extern crate utp;
+//!
+//! use utp::UtpStream;
+//! use std::io::{Read, Write};
+//!
+//! fn main() {
+//! // Connect to an hypothetical local server running on port 8080
+//! let addr = "127.0.0.1:8080";
+//! let mut stream = match UtpStream::connect(addr) {
+//! Ok(stream) => stream,
+//! Err(e) => panic!("{}", e),
+//! };
+//!
+//! // Send a string
+//! match stream.write("Hi there!".as_bytes()) {
+//! Ok(_) => (),
+//! Err(e) => println!("Write failed with {}", e)
+//! }
+//!
+//! // Close the stream
+//! match stream.close() {
+//! Ok(()) => println!("Connection closed"),
+//! Err(e) => println!("{}", e)
+//! }
+//! }
+//! ```
|
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,2 +3,32 @@
CON //! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
ADD
ADD //! # Examples
ADD //!
ADD //! ```
ADD //! extern crate utp;
ADD //!
ADD //! use utp::UtpStream;
ADD //! use std::io::{Read, Write};
ADD //!
ADD //! fn main() {
ADD //! // Connect to an hypothetical local server running on port 8080
ADD //! let addr = "127.0.0.1:8080";
ADD //! let mut stream = match UtpStream::connect(addr) {
ADD //! Ok(stream) => stream,
ADD //! Err(e) => panic!("{}", e),
ADD //! };
ADD //!
ADD //! // Send a string
ADD //! match stream.write("Hi there!".as_bytes()) {
ADD //! Ok(_) => (),
ADD //! Err(e) => println!("Write failed with {}", e)
ADD //! }
ADD //!
ADD //! // Close the stream
ADD //! match stream.close() {
ADD //! Ok(()) => println!("Connection closed"),
ADD //! Err(e) => println!("{}", e)
ADD //! }
ADD //! }
ADD //! ```
CON
|
<<<<<<< SEARCH
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
// __________ ____ ____
=======
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
//! # Examples
//!
//! ```
//! extern crate utp;
//!
//! use utp::UtpStream;
//! use std::io::{Read, Write};
//!
//! fn main() {
//! // Connect to an hypothetical local server running on port 8080
//! let addr = "127.0.0.1:8080";
//! let mut stream = match UtpStream::connect(addr) {
//! Ok(stream) => stream,
//! Err(e) => panic!("{}", e),
//! };
//!
//! // Send a string
//! match stream.write("Hi there!".as_bytes()) {
//! Ok(_) => (),
//! Err(e) => println!("Write failed with {}", e)
//! }
//!
//! // Close the stream
//! match stream.close() {
//! Ok(()) => println!("Connection closed"),
//! Err(e) => println!("{}", e)
//! }
//! }
//! ```
// __________ ____ ____
>>>>>>> REPLACE
|
orekyuu/intellij-community
|
5a9210545798d7590ab786fb49f82078b3a9afc6
|
runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
|
java
|
apache-2.0
|
Upgrade to JUnit 4.0: Fixing Vector -> List update consequences.
|
package com.intellij.rt.execution.junit2;
import junit.framework.TestResult;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.framework.Test;
import java.util.Hashtable;
import java.util.Enumeration;
public class RunOnce extends TestResult {
private Hashtable myPeformedTests = new Hashtable();
private static final String NOT_ALLOWED_IN_ID = ":";
protected void run(TestCase test) {
if (test.getClass().getName().startsWith(TestSuite.class.getName())) {
super.run(test);
} else {
String testKey = keyOf(test);
if (!myPeformedTests.containsKey(testKey)) {
super.run(test);
myPeformedTests.put(testKey, test);
} else {
fireTestSkipped(test, (Test)myPeformedTests.get(testKey));
}
}
}
private void fireTestSkipped(TestCase test, Test peformedTest) {
for (Enumeration each = fListeners.elements(); each.hasMoreElements();) {
Object listener = each.nextElement();
if (listener instanceof TestSkippingListener)
((TestSkippingListener)listener).onTestSkipped(test, peformedTest);
}
}
private String keyOf(TestCase test) {
return test.getClass().getName() + NOT_ALLOWED_IN_ID +
test.getName() + NOT_ALLOWED_IN_ID +
test.toString();
}
}
|
package com.intellij.rt.execution.junit2;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import java.util.Hashtable;
public class RunOnce extends TestResult {
private Hashtable myPeformedTests = new Hashtable();
private static final String NOT_ALLOWED_IN_ID = ":";
protected void run(TestCase test) {
if (test.getClass().getName().startsWith(TestSuite.class.getName())) {
super.run(test);
} else {
String testKey = keyOf(test);
if (!myPeformedTests.containsKey(testKey)) {
super.run(test);
myPeformedTests.put(testKey, test);
} else {
fireTestSkipped(test, (Test)myPeformedTests.get(testKey));
}
}
}
private void fireTestSkipped(TestCase test, Test peformedTest) {
for (int i = 0; i < fListeners.size(); i++) {
Object listener = fListeners.get(i);
if (listener instanceof TestSkippingListener) {
((TestSkippingListener)listener).onTestSkipped(test, peformedTest);
}
}
}
private String keyOf(TestCase test) {
return test.getClass().getName() + NOT_ALLOWED_IN_ID +
test.getName() + NOT_ALLOWED_IN_ID +
test.toString();
}
}
| 8 | 8 | 3 |
mixed
|
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
+++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
@@ -2,9 +2,8 @@
+import junit.framework.Test;
+import junit.framework.TestCase;
import junit.framework.TestResult;
-import junit.framework.TestCase;
import junit.framework.TestSuite;
-import junit.framework.Test;
import java.util.Hashtable;
-import java.util.Enumeration;
@@ -29,6 +28,7 @@
private void fireTestSkipped(TestCase test, Test peformedTest) {
- for (Enumeration each = fListeners.elements(); each.hasMoreElements();) {
- Object listener = each.nextElement();
- if (listener instanceof TestSkippingListener)
+ for (int i = 0; i < fListeners.size(); i++) {
+ Object listener = fListeners.get(i);
+ if (listener instanceof TestSkippingListener) {
((TestSkippingListener)listener).onTestSkipped(test, peformedTest);
+ }
}
@@ -38,4 +38,4 @@
return test.getClass().getName() + NOT_ALLOWED_IN_ID +
- test.getName() + NOT_ALLOWED_IN_ID +
- test.toString();
+ test.getName() + NOT_ALLOWED_IN_ID +
+ test.toString();
}
|
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
+++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
@@ ... @@
+import junit.framework.Test;
+import junit.framework.TestCase;
import junit.framework.TestResult;
-import junit.framework.TestCase;
import junit.framework.TestSuite;
-import junit.framework.Test;
import java.util.Hashtable;
-import java.util.Enumeration;
@@ ... @@
private void fireTestSkipped(TestCase test, Test peformedTest) {
- for (Enumeration each = fListeners.elements(); each.hasMoreElements();) {
- Object listener = each.nextElement();
- if (listener instanceof TestSkippingListener)
+ for (int i = 0; i < fListeners.size(); i++) {
+ Object listener = fListeners.get(i);
+ if (listener instanceof TestSkippingListener) {
((TestSkippingListener)listener).onTestSkipped(test, peformedTest);
+ }
}
@@ ... @@
return test.getClass().getName() + NOT_ALLOWED_IN_ID +
- test.getName() + NOT_ALLOWED_IN_ID +
- test.toString();
+ test.getName() + NOT_ALLOWED_IN_ID +
+ test.toString();
}
|
--- a/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
+++ b/runtimesource/com/intellij/rt/execution/junit2/RunOnce.java
@@ -2,9 +2,8 @@
CON
ADD import junit.framework.Test;
ADD import junit.framework.TestCase;
CON import junit.framework.TestResult;
DEL import junit.framework.TestCase;
CON import junit.framework.TestSuite;
DEL import junit.framework.Test;
CON
CON import java.util.Hashtable;
DEL import java.util.Enumeration;
CON
@@ -29,6 +28,7 @@
CON private void fireTestSkipped(TestCase test, Test peformedTest) {
DEL for (Enumeration each = fListeners.elements(); each.hasMoreElements();) {
DEL Object listener = each.nextElement();
DEL if (listener instanceof TestSkippingListener)
ADD for (int i = 0; i < fListeners.size(); i++) {
ADD Object listener = fListeners.get(i);
ADD if (listener instanceof TestSkippingListener) {
CON ((TestSkippingListener)listener).onTestSkipped(test, peformedTest);
ADD }
CON }
@@ -38,4 +38,4 @@
CON return test.getClass().getName() + NOT_ALLOWED_IN_ID +
DEL test.getName() + NOT_ALLOWED_IN_ID +
DEL test.toString();
ADD test.getName() + NOT_ALLOWED_IN_ID +
ADD test.toString();
CON }
|
<<<<<<< SEARCH
package com.intellij.rt.execution.junit2;
import junit.framework.TestResult;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.framework.Test;
import java.util.Hashtable;
import java.util.Enumeration;
public class RunOnce extends TestResult {
=======
package com.intellij.rt.execution.junit2;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import java.util.Hashtable;
public class RunOnce extends TestResult {
>>>>>>> REPLACE
|
ATLauncher/Discord-Bot
|
2bd205f87133ac7b4406514964a35d5d0758e02e
|
src/watchers/TextSpamWatcher.js
|
javascript
|
mit
|
Add in new Kazuto Kirigia spam
|
import BaseWatcher from './BaseWatcher';
import config from '../config';
/**
* This checks for people spamming text stuff.
*/
class TextSpamWatcher extends BaseWatcher {
usesBypassRules = true;
/**
* The method this watcher should listen on.
*
* @type {string[]}
*/
method = [
'message',
'messageUpdate'
];
async action(method, message, updatedMessage) {
let messageToActUpon = message;
if (method === 'messageUpdate') {
messageToActUpon = updatedMessage;
}
const rulesChannel = this.bot.channels.find((channel) => (channel.name === config.rules_channel));
const cleanMessage = messageToActUpon.cleanContent.toLowerCase();
if (
cleanMessage.indexOf('this is cooldog') !== -1 ||
cleanMessage.indexOf('this is memedog') !== -1 ||
cleanMessage.indexOf('chrisopeer davies') !== -1 ||
cleanMessage.indexOf('jessica davies') !== -1 ||
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
cleanMessage.indexOf('bots are joining servers and sending mass') !== -1
) {
const warningMessage = await messageToActUpon.reply(
`Please read the ${rulesChannel} channel. Spamming or encouraging spamming is not allowed.`
);
this.addWarningToUser(messageToActUpon);
messageToActUpon.delete();
warningMessage.delete(60000);
}
}
}
export default TextSpamWatcher;
|
import BaseWatcher from './BaseWatcher';
import config from '../config';
/**
* This checks for people spamming text stuff.
*/
class TextSpamWatcher extends BaseWatcher {
usesBypassRules = true;
/**
* The method this watcher should listen on.
*
* @type {string[]}
*/
method = [
'message',
'messageUpdate'
];
async action(method, message, updatedMessage) {
let messageToActUpon = message;
if (method === 'messageUpdate') {
messageToActUpon = updatedMessage;
}
const rulesChannel = this.bot.channels.find((channel) => (channel.name === config.rules_channel));
const cleanMessage = messageToActUpon.cleanContent.toLowerCase();
if (
cleanMessage.indexOf('this is cooldog') !== -1 ||
cleanMessage.indexOf('this is memedog') !== -1 ||
cleanMessage.indexOf('chrisopeer davies') !== -1 ||
cleanMessage.indexOf('jessica davies') !== -1 ||
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ||
cleanMessage.indexOf('kazuto kirigia') !== -1
) {
const warningMessage = await messageToActUpon.reply(
`Please read the ${rulesChannel} channel. Spamming or encouraging spamming is not allowed.`
);
this.addWarningToUser(messageToActUpon);
messageToActUpon.delete();
warningMessage.delete(60000);
}
}
}
export default TextSpamWatcher;
| 2 | 1 | 1 |
mixed
|
--- a/src/watchers/TextSpamWatcher.js
+++ b/src/watchers/TextSpamWatcher.js
@@ -37,3 +37,4 @@
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
- cleanMessage.indexOf('bots are joining servers and sending mass') !== -1
+ cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ||
+ cleanMessage.indexOf('kazuto kirigia') !== -1
) {
|
--- a/src/watchers/TextSpamWatcher.js
+++ b/src/watchers/TextSpamWatcher.js
@@ ... @@
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
- cleanMessage.indexOf('bots are joining servers and sending mass') !== -1
+ cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ||
+ cleanMessage.indexOf('kazuto kirigia') !== -1
) {
|
--- a/src/watchers/TextSpamWatcher.js
+++ b/src/watchers/TextSpamWatcher.js
@@ -37,3 +37,4 @@
CON cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
DEL cleanMessage.indexOf('bots are joining servers and sending mass') !== -1
ADD cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ||
ADD cleanMessage.indexOf('kazuto kirigia') !== -1
CON ) {
|
<<<<<<< SEARCH
cleanMessage.indexOf('jessica davies') !== -1 ||
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
cleanMessage.indexOf('bots are joining servers and sending mass') !== -1
) {
const warningMessage = await messageToActUpon.reply(
=======
cleanMessage.indexOf('jessica davies') !== -1 ||
cleanMessage.indexOf('DMing inappropriate photos of underage children') !== -1 ||
cleanMessage.indexOf('bots are joining servers and sending mass') !== -1 ||
cleanMessage.indexOf('kazuto kirigia') !== -1
) {
const warningMessage = await messageToActUpon.reply(
>>>>>>> REPLACE
|
dshaps10/full-stack-demo-site
|
5d35cfe5b2655eca9e60c382a13ad092c3e99df4
|
server/server.js
|
javascript
|
mit
|
Add routes for posting new products and retrieving list of products
|
// npm dependencies
const express = require('express');
const hbs = require('hbs');
// local packages
let {mongoose} = require('./db/mongoose');
let {Product} = require('./db/models/products');
// instantiate Express.js
const app = express();
// Tell Handlebars where to look for partials
hbs.registerPartials(__dirname + '../../views/partials');
// Set Handlebars as default templating engine
app.set('view engine', 'hbs');
// Point app towards stylesheets
app.use(express.static(__dirname + '/public'));
// root route
app.get('/', (req, res) => {
res.render('home.hbs', {
pageTitle: 'Home Page'
});
});
// route for e-commerce site
app.get('/shop', (req, res) => {
res.render('shop.hbs', {
pageTitle: 'E-Commerce Shop'
});
});
// Specify port and run local server
let port = 3000;
app.listen(port, () => {
console.log(`listening on ${port}`);
});
module.exports = {
app
}
|
// npm dependencies
const express = require('express');
const hbs = require('hbs');
const bodyParser = require('body-parser');
// local packages
let {mongoose} = require('./db/mongoose');
let {Product} = require('./models/products');
// instantiate Express.js
const app = express();
// Tell Handlebars where to look for partials
hbs.registerPartials(__dirname + '../../views/partials');
// Set Handlebars as default templating engine
app.set('view engine', 'hbs');
// Point app towards stylesheets
app.use(express.static(__dirname + '/public'));
// Allows for JSON-formatted POST requests
app.use(bodyParser.json());
// root route
app.get('/', (req, res) => {
res.render('home.hbs', {
pageTitle: 'Home Page'
});
});
// route for e-commerce site
app.get('/shop', (req, res) => {
res.render('shop.hbs', {
pageTitle: 'E-Commerce Shop'
});
});
// API endpoint for seeding product data
app.post('/shop/products', (req, res) => {
let product = new Product({
title: req.body.title,
description: req.body.description,
price: req.body.price
});
product.save()
.then((doc) => {
res.send(doc);
}, (e) => {
res.send('Could not add product');
});
})
app.get('/shop/products', (req, res) => {
Product.find()
.then((products) => {
res.send(products);
}, (e) => {
rest.send('Could not retrieve products');
});
});
// Specify port and run local server
let port = 3000;
app.listen(port, () => {
console.log(`listening on ${port}`);
});
module.exports = {
app
}
| 30 | 1 | 4 |
mixed
|
--- a/server/server.js
+++ b/server/server.js
@@ -3,2 +3,3 @@
const hbs = require('hbs');
+const bodyParser = require('body-parser');
@@ -6,3 +7,3 @@
let {mongoose} = require('./db/mongoose');
-let {Product} = require('./db/models/products');
+let {Product} = require('./models/products');
@@ -19,2 +20,5 @@
app.use(express.static(__dirname + '/public'));
+
+// Allows for JSON-formatted POST requests
+app.use(bodyParser.json());
@@ -34,2 +38,27 @@
+// API endpoint for seeding product data
+app.post('/shop/products', (req, res) => {
+ let product = new Product({
+ title: req.body.title,
+ description: req.body.description,
+ price: req.body.price
+ });
+
+ product.save()
+ .then((doc) => {
+ res.send(doc);
+ }, (e) => {
+ res.send('Could not add product');
+ });
+})
+
+app.get('/shop/products', (req, res) => {
+ Product.find()
+ .then((products) => {
+ res.send(products);
+ }, (e) => {
+ rest.send('Could not retrieve products');
+ });
+});
+
// Specify port and run local server
|
--- a/server/server.js
+++ b/server/server.js
@@ ... @@
const hbs = require('hbs');
+const bodyParser = require('body-parser');
@@ ... @@
let {mongoose} = require('./db/mongoose');
-let {Product} = require('./db/models/products');
+let {Product} = require('./models/products');
@@ ... @@
app.use(express.static(__dirname + '/public'));
+
+// Allows for JSON-formatted POST requests
+app.use(bodyParser.json());
@@ ... @@
+// API endpoint for seeding product data
+app.post('/shop/products', (req, res) => {
+ let product = new Product({
+ title: req.body.title,
+ description: req.body.description,
+ price: req.body.price
+ });
+
+ product.save()
+ .then((doc) => {
+ res.send(doc);
+ }, (e) => {
+ res.send('Could not add product');
+ });
+})
+
+app.get('/shop/products', (req, res) => {
+ Product.find()
+ .then((products) => {
+ res.send(products);
+ }, (e) => {
+ rest.send('Could not retrieve products');
+ });
+});
+
// Specify port and run local server
|
--- a/server/server.js
+++ b/server/server.js
@@ -3,2 +3,3 @@
CON const hbs = require('hbs');
ADD const bodyParser = require('body-parser');
CON
@@ -6,3 +7,3 @@
CON let {mongoose} = require('./db/mongoose');
DEL let {Product} = require('./db/models/products');
ADD let {Product} = require('./models/products');
CON
@@ -19,2 +20,5 @@
CON app.use(express.static(__dirname + '/public'));
ADD
ADD // Allows for JSON-formatted POST requests
ADD app.use(bodyParser.json());
CON
@@ -34,2 +38,27 @@
CON
ADD // API endpoint for seeding product data
ADD app.post('/shop/products', (req, res) => {
ADD let product = new Product({
ADD title: req.body.title,
ADD description: req.body.description,
ADD price: req.body.price
ADD });
ADD
ADD product.save()
ADD .then((doc) => {
ADD res.send(doc);
ADD }, (e) => {
ADD res.send('Could not add product');
ADD });
ADD })
ADD
ADD app.get('/shop/products', (req, res) => {
ADD Product.find()
ADD .then((products) => {
ADD res.send(products);
ADD }, (e) => {
ADD rest.send('Could not retrieve products');
ADD });
ADD });
ADD
CON // Specify port and run local server
|
<<<<<<< SEARCH
const express = require('express');
const hbs = require('hbs');
// local packages
let {mongoose} = require('./db/mongoose');
let {Product} = require('./db/models/products');
// instantiate Express.js
=======
const express = require('express');
const hbs = require('hbs');
const bodyParser = require('body-parser');
// local packages
let {mongoose} = require('./db/mongoose');
let {Product} = require('./models/products');
// instantiate Express.js
>>>>>>> REPLACE
|
zensum/franz
|
4c3b4267c5b60d94a25373aaa97009da81c502b1
|
src/main/kotlin/engine/mock/MockConsumerBase.kt
|
kotlin
|
mit
|
Update to MockConsumerActorBase to actually implement ConsumerActor (add arg "scope" to function createWorker).
|
package franz.engine.mock
import franz.JobStateException
import franz.JobStatus
import franz.Message
import franz.engine.ConsumerActor
import franz.engine.WorkerFunction
import kotlinx.coroutines.runBlocking
abstract class MockConsumerActorBase<T, U> : ConsumerActor<T, U> {
data class Result(
val throwable: Throwable?,
val status: JobStatus
)
private val internalResults: MutableList<Result> = mutableListOf()
fun results() = internalResults.toList()
protected var handlers = mutableListOf<(Message<T, U>) -> Unit>()
override fun start() = Unit
override fun stop() = Unit
override fun setJobStatus(msg: Message<T, U>, status: JobStatus) {
internalResults.add(Result(throwable = null, status = status))
}
private fun setException(e: Throwable) {
internalResults.add(Result(e, JobStatus.TransientFailure))
}
override fun createWorker(fn: WorkerFunction<T, U>) {
worker(this, fn)
}
private inline fun tryJobStatus(fn: () -> JobStatus) = try {
fn()
} catch(ex: JobStateException){
ex.result
} catch (ex: Exception) {
JobStatus.TransientFailure
}
private fun worker(consumer: ConsumerActor<T, U>, fn: WorkerFunction<T, U>) {
consumer.subscribe {
setJobStatus(it, tryJobStatus { runBlocking{fn(it) } })
}
}
fun createFactory() =
MockConsumerActorFactory(this)
}
|
package franz.engine.mock
import franz.JobStateException
import franz.JobStatus
import franz.Message
import franz.engine.ConsumerActor
import franz.engine.WorkerFunction
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.runBlocking
abstract class MockConsumerActorBase<T, U> : ConsumerActor<T, U> {
data class Result(
val throwable: Throwable?,
val status: JobStatus
)
private val internalResults: MutableList<Result> = mutableListOf()
fun results() = internalResults.toList()
protected var handlers = mutableListOf<(Message<T, U>) -> Unit>()
override fun start() = Unit
override fun stop() = Unit
override fun setJobStatus(msg: Message<T, U>, status: JobStatus) {
internalResults.add(Result(throwable = null, status = status))
}
private fun setException(e: Throwable) {
internalResults.add(Result(e, JobStatus.TransientFailure))
}
override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) {
worker(this, fn)
}
private inline fun tryJobStatus(fn: () -> JobStatus) = try {
fn()
} catch(ex: JobStateException){
ex.result
} catch (ex: Exception) {
JobStatus.TransientFailure
}
private fun worker(consumer: ConsumerActor<T, U>, fn: WorkerFunction<T, U>) {
consumer.subscribe {
setJobStatus(it, tryJobStatus { runBlocking{fn(it) } })
}
}
fun createFactory() =
MockConsumerActorFactory(this)
}
| 2 | 1 | 2 |
mixed
|
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt
+++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt
@@ -7,2 +7,3 @@
import franz.engine.WorkerFunction
+import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.runBlocking
@@ -32,3 +33,3 @@
- override fun createWorker(fn: WorkerFunction<T, U>) {
+ override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) {
worker(this, fn)
|
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt
+++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt
@@ ... @@
import franz.engine.WorkerFunction
+import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.runBlocking
@@ ... @@
- override fun createWorker(fn: WorkerFunction<T, U>) {
+ override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) {
worker(this, fn)
|
--- a/src/main/kotlin/engine/mock/MockConsumerBase.kt
+++ b/src/main/kotlin/engine/mock/MockConsumerBase.kt
@@ -7,2 +7,3 @@
CON import franz.engine.WorkerFunction
ADD import kotlinx.coroutines.CoroutineScope
CON import kotlinx.coroutines.runBlocking
@@ -32,3 +33,3 @@
CON
DEL override fun createWorker(fn: WorkerFunction<T, U>) {
ADD override fun createWorker(fn: WorkerFunction<T, U>, scope: CoroutineScope) {
CON worker(this, fn)
|
<<<<<<< SEARCH
import franz.engine.ConsumerActor
import franz.engine.WorkerFunction
import kotlinx.coroutines.runBlocking
=======
import franz.engine.ConsumerActor
import franz.engine.WorkerFunction
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.runBlocking
>>>>>>> REPLACE
|
Wallacoloo/serde_osc
|
c8449319aad7a52fc5adefa7eaa29074dbe054d3
|
examples/to_from_vec.rs
|
rust
|
apache-2.0
|
Update example to conform to new message arg behavior
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_bytes;
extern crate serde_osc;
use serde_bytes::ByteBuf;
use serde_osc::{de, ser};
/// Struct we'll serialize.
/// This represents a single OSC message with three arguments:
/// one of type 'i', 'f' and 'b', encoded in the order they appear in the struct.
#[derive(Debug, Deserialize, Serialize)]
struct Message {
address: String,
num_channels: i32,
rate: f32,
// ByteBuf is the object we use for OSC "blobs".
// It's a thin wrapper over Vec<u8> provided by Serde that allows
// for more computationally-efficient serialization/deserialization.
content: ByteBuf,
}
fn main() {
let message = Message {
address: "/audio/play".to_owned(),
num_channels: 1,
rate: 44100.0f32,
content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef])
};
println!("Serializing {:?}", message);
// Serialize the message to an OSC packet stored in a Vec<u8>
let as_vec = ser::to_vec(&message).unwrap();
println!("Serialied to: {:?}", as_vec);
// Deserialize an OSC packet contained in a Vec<u8> into the Message struct
let received: Message = de::from_vec(&as_vec).unwrap();
println!("Received: {:?}", received);
}
|
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_bytes;
extern crate serde_osc;
use serde_bytes::ByteBuf;
use serde_osc::{de, ser};
/// Struct we'll serialize.
/// This represents a single OSC message with three arguments:
/// one of type 'i', 'f' and 'b', encoded in the order they appear in the struct.
#[derive(Debug, Deserialize, Serialize)]
struct Message {
address: String,
// ByteBuf is the object we use for OSC "blobs".
// It's a thin wrapper over Vec<u8> provided by Serde that allows
// for more computationally-efficient serialization/deserialization.
args: (i32, f32, ByteBuf),
}
fn main() {
let message = Message {
address: "/audio/play".to_owned(),
args: (
1,
44100.0f32,
ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]),
)
};
println!("Serializing {:?}", message);
// Serialize the message to an OSC packet stored in a Vec<u8>
let as_vec = ser::to_vec(&message).unwrap();
println!("Serialied to: {:?}", as_vec);
// Deserialize an OSC packet contained in a Vec<u8> into the Message struct
let received: Message = de::from_vec(&as_vec).unwrap();
println!("Received: {:?}", received);
}
| 7 | 6 | 4 |
mixed
|
--- a/examples/to_from_vec.rs
+++ b/examples/to_from_vec.rs
@@ -15,4 +15,2 @@
address: String,
- num_channels: i32,
- rate: f32,
// ByteBuf is the object we use for OSC "blobs".
@@ -20,3 +18,3 @@
// for more computationally-efficient serialization/deserialization.
- content: ByteBuf,
+ args: (i32, f32, ByteBuf),
}
@@ -26,5 +24,7 @@
address: "/audio/play".to_owned(),
- num_channels: 1,
- rate: 44100.0f32,
- content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef])
+ args: (
+ 1,
+ 44100.0f32,
+ ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]),
+ )
};
@@ -40 +40,2 @@
}
+
|
--- a/examples/to_from_vec.rs
+++ b/examples/to_from_vec.rs
@@ ... @@
address: String,
- num_channels: i32,
- rate: f32,
// ByteBuf is the object we use for OSC "blobs".
@@ ... @@
// for more computationally-efficient serialization/deserialization.
- content: ByteBuf,
+ args: (i32, f32, ByteBuf),
}
@@ ... @@
address: "/audio/play".to_owned(),
- num_channels: 1,
- rate: 44100.0f32,
- content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef])
+ args: (
+ 1,
+ 44100.0f32,
+ ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]),
+ )
};
@@ ... @@
}
+
|
--- a/examples/to_from_vec.rs
+++ b/examples/to_from_vec.rs
@@ -15,4 +15,2 @@
CON address: String,
DEL num_channels: i32,
DEL rate: f32,
CON // ByteBuf is the object we use for OSC "blobs".
@@ -20,3 +18,3 @@
CON // for more computationally-efficient serialization/deserialization.
DEL content: ByteBuf,
ADD args: (i32, f32, ByteBuf),
CON }
@@ -26,5 +24,7 @@
CON address: "/audio/play".to_owned(),
DEL num_channels: 1,
DEL rate: 44100.0f32,
DEL content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef])
ADD args: (
ADD 1,
ADD 44100.0f32,
ADD ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]),
ADD )
CON };
@@ -40 +40,2 @@
CON }
ADD
|
<<<<<<< SEARCH
struct Message {
address: String,
num_channels: i32,
rate: f32,
// ByteBuf is the object we use for OSC "blobs".
// It's a thin wrapper over Vec<u8> provided by Serde that allows
// for more computationally-efficient serialization/deserialization.
content: ByteBuf,
}
fn main() {
let message = Message {
address: "/audio/play".to_owned(),
num_channels: 1,
rate: 44100.0f32,
content: ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef])
};
println!("Serializing {:?}", message);
=======
struct Message {
address: String,
// ByteBuf is the object we use for OSC "blobs".
// It's a thin wrapper over Vec<u8> provided by Serde that allows
// for more computationally-efficient serialization/deserialization.
args: (i32, f32, ByteBuf),
}
fn main() {
let message = Message {
address: "/audio/play".to_owned(),
args: (
1,
44100.0f32,
ByteBuf::from(vec![0xde, 0xad, 0xbe, 0xef]),
)
};
println!("Serializing {:?}", message);
>>>>>>> REPLACE
|
mrjmad/invocations
|
fc75f5843af70c09e0d63284277bf88689cbb06d
|
invocations/docs.py
|
python
|
bsd-2-clause
|
Add apidoc to doc building
|
import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def docs(clean=False, browse=False):
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
|
import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def api_docs(target, output="api", exclude=""):
"""
Runs ``sphinx-apidoc`` to autogenerate your API docs.
Must give target directory/package as ``target``. Results are written out
to ``docs/<output>`` (``docs/api`` by default).
To exclude certain output files from the final build give ``exclude`` as a
comma separated list of file paths.
"""
output = os.path.join('docs', output)
# Have to make these absolute or apidoc is dumb :(
exclude = map(
lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
exclude.split(',')
)
run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
@task
def docs(clean=False, browse=False, api_target=None, api_output=None,
api_exclude=None):
"""
Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
Can also build API docs by giving ``api_target`` and optionally
``api_output`` and/or ``api_exclude``.
"""
if api_target:
kwargs = {'target': api_target}
if api_output:
kwargs['output'] = api_output
if api_exclude:
kwargs['exclude'] = api_exclude
api_docs.body(**kwargs)
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
| 35 | 1 | 1 |
mixed
|
--- a/invocations/docs.py
+++ b/invocations/docs.py
@@ -21,3 +21,37 @@
@task
-def docs(clean=False, browse=False):
+def api_docs(target, output="api", exclude=""):
+ """
+ Runs ``sphinx-apidoc`` to autogenerate your API docs.
+
+ Must give target directory/package as ``target``. Results are written out
+ to ``docs/<output>`` (``docs/api`` by default).
+
+ To exclude certain output files from the final build give ``exclude`` as a
+ comma separated list of file paths.
+ """
+ output = os.path.join('docs', output)
+ # Have to make these absolute or apidoc is dumb :(
+ exclude = map(
+ lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
+ exclude.split(',')
+ )
+ run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
+
+
+@task
+def docs(clean=False, browse=False, api_target=None, api_output=None,
+ api_exclude=None):
+ """
+ Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
+
+ Can also build API docs by giving ``api_target`` and optionally
+ ``api_output`` and/or ``api_exclude``.
+ """
+ if api_target:
+ kwargs = {'target': api_target}
+ if api_output:
+ kwargs['output'] = api_output
+ if api_exclude:
+ kwargs['exclude'] = api_exclude
+ api_docs.body(**kwargs)
if clean:
|
--- a/invocations/docs.py
+++ b/invocations/docs.py
@@ ... @@
@task
-def docs(clean=False, browse=False):
+def api_docs(target, output="api", exclude=""):
+ """
+ Runs ``sphinx-apidoc`` to autogenerate your API docs.
+
+ Must give target directory/package as ``target``. Results are written out
+ to ``docs/<output>`` (``docs/api`` by default).
+
+ To exclude certain output files from the final build give ``exclude`` as a
+ comma separated list of file paths.
+ """
+ output = os.path.join('docs', output)
+ # Have to make these absolute or apidoc is dumb :(
+ exclude = map(
+ lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
+ exclude.split(',')
+ )
+ run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
+
+
+@task
+def docs(clean=False, browse=False, api_target=None, api_output=None,
+ api_exclude=None):
+ """
+ Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
+
+ Can also build API docs by giving ``api_target`` and optionally
+ ``api_output`` and/or ``api_exclude``.
+ """
+ if api_target:
+ kwargs = {'target': api_target}
+ if api_output:
+ kwargs['output'] = api_output
+ if api_exclude:
+ kwargs['exclude'] = api_exclude
+ api_docs.body(**kwargs)
if clean:
|
--- a/invocations/docs.py
+++ b/invocations/docs.py
@@ -21,3 +21,37 @@
CON @task
DEL def docs(clean=False, browse=False):
ADD def api_docs(target, output="api", exclude=""):
ADD """
ADD Runs ``sphinx-apidoc`` to autogenerate your API docs.
ADD
ADD Must give target directory/package as ``target``. Results are written out
ADD to ``docs/<output>`` (``docs/api`` by default).
ADD
ADD To exclude certain output files from the final build give ``exclude`` as a
ADD comma separated list of file paths.
ADD """
ADD output = os.path.join('docs', output)
ADD # Have to make these absolute or apidoc is dumb :(
ADD exclude = map(
ADD lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
ADD exclude.split(',')
ADD )
ADD run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
ADD
ADD
ADD @task
ADD def docs(clean=False, browse=False, api_target=None, api_output=None,
ADD api_exclude=None):
ADD """
ADD Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
ADD
ADD Can also build API docs by giving ``api_target`` and optionally
ADD ``api_output`` and/or ``api_exclude``.
ADD """
ADD if api_target:
ADD kwargs = {'target': api_target}
ADD if api_output:
ADD kwargs['output'] = api_output
ADD if api_exclude:
ADD kwargs['exclude'] = api_exclude
ADD api_docs.body(**kwargs)
CON if clean:
|
<<<<<<< SEARCH
@task
def docs(clean=False, browse=False):
if clean:
clean_docs.body()
=======
@task
def api_docs(target, output="api", exclude=""):
"""
Runs ``sphinx-apidoc`` to autogenerate your API docs.
Must give target directory/package as ``target``. Results are written out
to ``docs/<output>`` (``docs/api`` by default).
To exclude certain output files from the final build give ``exclude`` as a
comma separated list of file paths.
"""
output = os.path.join('docs', output)
# Have to make these absolute or apidoc is dumb :(
exclude = map(
lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
exclude.split(',')
)
run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
@task
def docs(clean=False, browse=False, api_target=None, api_output=None,
api_exclude=None):
"""
Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
Can also build API docs by giving ``api_target`` and optionally
``api_output`` and/or ``api_exclude``.
"""
if api_target:
kwargs = {'target': api_target}
if api_output:
kwargs['output'] = api_output
if api_exclude:
kwargs['exclude'] = api_exclude
api_docs.body(**kwargs)
if clean:
clean_docs.body()
>>>>>>> REPLACE
|
edelooff/sqlalchemy-json
|
db6b869eae416e72fa30b1d7271b0ed1d7dc1a55
|
sqlalchemy_json/__init__.py
|
python
|
bsd-2-clause
|
Fix error when setting JSON value to be `None`
Previously this would raise an attribute error as `None` does not
have the `coerce` attribute.
|
from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
|
from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if value is None:
return value
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
| 2 | 0 | 1 |
add_only
|
--- a/sqlalchemy_json/__init__.py
+++ b/sqlalchemy_json/__init__.py
@@ -37,2 +37,4 @@
"""Convert plain dictionary to NestedMutable."""
+ if value is None:
+ return value
if isinstance(value, cls):
|
--- a/sqlalchemy_json/__init__.py
+++ b/sqlalchemy_json/__init__.py
@@ ... @@
"""Convert plain dictionary to NestedMutable."""
+ if value is None:
+ return value
if isinstance(value, cls):
|
--- a/sqlalchemy_json/__init__.py
+++ b/sqlalchemy_json/__init__.py
@@ -37,2 +37,4 @@
CON """Convert plain dictionary to NestedMutable."""
ADD if value is None:
ADD return value
CON if isinstance(value, cls):
|
<<<<<<< SEARCH
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if isinstance(value, cls):
return value
=======
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if value is None:
return value
if isinstance(value, cls):
return value
>>>>>>> REPLACE
|
Diff-XYZ
This is the dataset for the paper *Diff-XYZ: A Benchmark for Evaluating Diff Understanding*.
Diff-XYZ contains 1,000 real-world code edits sampled and filtered from
the CommitPackFT dataset.
Each example provides three components: the original file contents (old_code), the modified contents (new_code), and
multiple diff representations (udiff, udiff-h, udiff-l, and search-replace).
These formats enable evaluation of LLM capabilities on three code editing tasks (a minimal loading sketch follows the list below):
- Apply: Given old code and diff, generate new code
- Anti-Apply: Given new code and diff, generate old code
- Diff-Generation: Given old and new code, generate the diff
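As a rough illustration of how one of these tasks can be posed to a model, the sketch below loads the dataset with the `datasets` library and assembles an Apply-task prompt from a single example. The dataset id `org/diff-xyz` and the split name are placeholders, not the actual Hub identifiers, and the prompt template is only an assumption, not the official evaluation harness.

```python
# Minimal sketch (not the official harness): build an Apply-task prompt from one example.
# "org/diff-xyz" and the split name are placeholders -- substitute the real Hub id/split.
from datasets import load_dataset

ds = load_dataset("org/diff-xyz", split="train")  # hypothetical dataset id and split
ex = ds[0]

# Apply: the model sees the pre-edit file plus a diff and must return the post-edit file.
prompt = (
    "Apply the following diff to the file and output the complete updated file.\n\n"
    "### Old file\n" + ex["old_code"] + "\n\n"
    "### Diff\n" + ex["udiff"] + "\n"
)
reference = ex["new_code"]  # the target output for the Apply task

print(prompt[:400])
```

The Anti-Apply and Diff-Generation tasks can be posed the same way by swapping which of `old_code`, `new_code`, and the diff fields go into the prompt versus the reference.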
How it's built
Examples were systematically filtered from CommitPackFT to ensure high quality:
- Scope: Single-file changes only, excluding binary files, vendor directories, and generated code
- Quality: Removed trivial changes (whitespace-only) and likely test files
- Size: Required 40+ lines in at least one version of the code (old or new); excluded very large files (1000+ lines)
- Sampling:
  - Target 50/50 split between single-hunk and multi-hunk edits (hunks counted via `@@` markers in the unified diff); see the sketch after this list.
  - Within each hunk group, stratify by change size (lines added + removed) using the 40th/80th percentiles of that group, targeting ≈40% small, 40% medium, 20% large.
  - Cap examples to ≤5 per repository per language.
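The two quantities that drive this sampling can be recovered from the unified diff alone. The helpers below are an illustrative sketch following the description above, not the actual build script used for the dataset.

```python
# Illustrative sketch of the sampling quantities: hunk count and change size.
def count_hunks(udiff: str) -> int:
    """Number of hunk headers, i.e. lines starting with '@@'."""
    return sum(1 for line in udiff.splitlines() if line.startswith("@@"))

def change_size(udiff: str) -> int:
    """Added plus removed lines, ignoring the '+++'/'---' file headers."""
    added = removed = 0
    for line in udiff.splitlines():
        if line.startswith("+") and not line.startswith("+++"):
            added += 1
        elif line.startswith("-") and not line.startswith("---"):
            removed += 1
    return added + removed

# Examples with one hunk form the single-hunk group, the rest the multi-hunk group;
# within each group, the 40th/80th percentiles of change_size mark the
# small/medium/large strata described above.
```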
Dataset Statistics
- Total examples: 1,000
- Languages: Python (200), JavaScript (200), Java (200), Kotlin (200), Rust (200)
Hunks:
- 1 hunk: 500 (50.0%)
- 2+ hunks: 500 (50.0%)
Change size (added + removed), stratified within hunk groups:
- Small (≤40th pct): 424 (42.4%)
- Medium (40th–80th): 388 (38.8%)
- Large (>80th): 188 (18.8%)
Change type:
- Mixed (additions + deletions): 815 (81.5%)
- Add-only: 163 (16.3%)
- Delete-only: 22 (2.2%)
Repository diversity: 891 unique repositories.
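These headline numbers can be re-derived directly from the dataset fields. The snippet below is a sanity-check sketch using the same placeholder dataset id and split as the earlier loading example.

```python
# Sanity-check sketch: recompute a few of the reported statistics from the fields.
from collections import Counter
from datasets import load_dataset

ds = load_dataset("org/diff-xyz", split="train")  # placeholder id/split

print("examples:", len(ds))
print("languages:", Counter(ds["lang"]))
print("hunks:", Counter("1 hunk" if n == 1 else "2+ hunks" for n in ds["n_hunks"]))
print("change kind:", Counter(ds["change_kind"]))
print("unique repos:", len(set(ds["repo"])))
```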
Schema
| Field | Type | Description |
|---|---|---|
| repo | string | Repository identifier (e.g., "owner/name") |
| commit | string | Commit SHA that produced the change |
| path | string | File path relative to repo root |
| lang | string | One of: python, javascript, java, kotlin, rust |
| license | string | SPDX license identifier of the repo |
| message | string | Original commit message |
| old_code | string | Complete file contents before change |
| new_code | string | Complete file contents after change |
| n_added | int | # of + lines (excluding headers) |
| n_removed | int | # of - lines (excluding headers) |
| n_hunks | int | # of @@ (hunk) sections |
| change_kind | string | add_only, del_only, or mixed |
| udiff | string | Standard unified diff (1-line context, numeric hunk headers) |
| udiff-h | string | Unified diff with relaxed hunk headers written as @@ ... @@ |
| udiff-l | string | Unified diff with explicit line markers: ADD, DEL, and CON |
| search-replace | string | Search/replace representation: pairs of SEARCH/REPLACE edit blocks |
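Of the diff representations, `search-replace` is the simplest to apply programmatically. The function below is a rough sketch of how its SEARCH/REPLACE blocks can be applied to `old_code`, matching the block layout visible in the preview rows; exact whitespace and newline handling are assumptions and may differ from the dataset's encoding.

```python
# Rough sketch: apply a `search-replace` patch to `old_code`.
# Block layout (as in the preview rows): "<<<<<<< SEARCH", the text to find,
# "=======", the replacement text, ">>>>>>> REPLACE".
def apply_search_replace(old_code: str, patch: str) -> str:
    updated = old_code
    for block in patch.split("<<<<<<< SEARCH\n")[1:]:
        search, rest = block.split("\n=======\n", 1)
        replace = rest.split("\n>>>>>>> REPLACE", 1)[0]
        updated = updated.replace(search, replace, 1)  # replace first occurrence only
    return updated
```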