Update scalafmt-core to 2.7.0
47erbot authored Sep 9, 2020
1 parent d4bede3 commit 58f9197
Showing 12 changed files with 216 additions and 269 deletions.
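Apart from the version bump in .scalafmt.conf, the diff is purely cosmetic: judging from the hunks below, scalafmt 2.7.0's default style folds a partial-function literal containing a single `case` onto the opening brace instead of giving the `case` its own line. A minimal before/after sketch of that rewrite (a hypothetical snippet, not taken from this repository):

// As formatted by scalafmt 2.6.4: the lone `case` sits on its own line.
xs.map({
  case (id, value) =>
    value.toString
})

// As formatted by scalafmt 2.7.0: the `case` is folded onto the opening brace.
xs.map({ case (id, value) =>
  value.toString
})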
2 changes: 1 addition & 1 deletion .scalafmt.conf
@@ -1,4 +1,4 @@
-version = 2.6.4
+version = 2.7.0

 style = defaultWithAlign
 maxColumn = 100
8 changes: 4 additions & 4 deletions fetch-debug/src/main/scala/debug.scala
@@ -39,16 +39,16 @@ object debug {
   def firstRequest(r: Round): Option[Long] =
     for {
       aQuery <- r.queries.headOption
-      firstR = r.queries.foldLeft(aQuery.start)({
-        case (acc, q) => acc min q.start
+      firstR = r.queries.foldLeft(aQuery.start)({ case (acc, q) =>
+        acc min q.start
       })
     } yield firstR

   def lastRequest(r: Round): Option[Long] =
     for {
       aQuery <- r.queries.headOption
-      lastR = r.queries.foldLeft(aQuery.end)({
-        case (acc, q) => acc max q.end
+      lastR = r.queries.foldLeft(aQuery.end)({ case (acc, q) =>
+        acc max q.end
       })
     } yield lastR

5 changes: 2 additions & 3 deletions fetch-examples/src/test/scala/GithubExample.scala
@@ -196,9 +196,8 @@ class GithubExample extends AnyWordSpec with Matchers {
   case class Project(repo: Repo, contributors: List[Contributor], languages: List[Language])

   def fetchProject[F[_]: ConcurrentEffect](repo: Repo): Fetch[F, Project] =
-    (repoContributors(repo), repoLanguages(repo)).mapN({
-      case (contribs, langs) =>
-        Project(repo = repo, contributors = contribs, languages = langs)
+    (repoContributors(repo), repoLanguages(repo)).mapN({ case (contribs, langs) =>
+      Project(repo = repo, contributors = contribs, languages = langs)
     })

   def fetchOrg[F[_]: ConcurrentEffect](org: String) =
30 changes: 14 additions & 16 deletions fetch-examples/src/test/scala/GraphQLExample.scala
@@ -204,8 +204,8 @@ class GraphQLExample extends AnyWordSpec with Matchers {
       repos
         .take(n)
         .traverse(repo =>
-          (Languages.fetch(repo), Collaborators.fetch(repo)).mapN {
-            case (ls, cs) => Project(name >> Some(repo.name), ls, cs)
+          (Languages.fetch(repo), Collaborators.fetch(repo)).mapN { case (ls, cs) =>
+            Project(name >> Some(repo.name), ls, cs)
           }
         )
     } yield projects
@@ -233,20 +233,18 @@ class GraphQLExample extends AnyWordSpec with Matchers {

   object Parsers {
     def queryParser: Parser[OrganizationQuery] =
-      rawParser.map({
-        case (o, n) =>
-          OrganizationQuery(
-            o,
-            n.map({
-              case (i, name, langs, colls) =>
-                RepositoriesQuery(
-                  i,
-                  if (name) Some(()) else None,
-                  if (langs) Some(LanguagesQuery()) else None,
-                  if (colls) Some(CollaboratorsQuery()) else None
-                )
-            })
-          )
+      rawParser.map({ case (o, n) =>
+        OrganizationQuery(
+          o,
+          n.map({ case (i, name, langs, colls) =>
+            RepositoriesQuery(
+              i,
+              if (name) Some(()) else None,
+              if (langs) Some(LanguagesQuery()) else None,
+              if (colls) Some(CollaboratorsQuery()) else None
+            )
+          })
+        )
       })

     def rawParser: Parser[(String, Option[(Int, Boolean, Boolean, Boolean)])] =
5 changes: 2 additions & 3 deletions fetch-examples/src/test/scala/Http4sExample.scala
@@ -158,9 +158,8 @@ class Http4sExample extends AnyWordSpec with Matchers {
       val (log, results) = io.unsafeRunSync

       results
-        .map {
-          case (user, posts) =>
-            s"${user.username} has ${posts.size} posts"
+        .map { case (user, posts) =>
+          s"${user.username} has ${posts.size} posts"
         }
         .foreach(println)
       log.rounds.size shouldEqual 2
29 changes: 13 additions & 16 deletions fetch-examples/src/test/scala/JedisExample.scala
@@ -96,13 +96,12 @@ object Binary {
   ): F[ByteArray] = {
     byteOutputStream
       .mproduct(outputStream(_))
-      .use({
-        case (byte, out) =>
-          S.delay {
-            out.writeObject(obj)
-            out.flush()
-            byte.toByteArray
-          }
+      .use({ case (byte, out) =>
+        S.delay {
+          out.writeObject(obj)
+          out.flush()
+          byte.toByteArray
+        }
       })
   }

@@ -111,12 +110,11 @@
   ): F[Option[A]] = {
     byteInputStream(bin)
       .mproduct(inputStream(_))
-      .use({
-        case (byte, in) =>
-          S.delay {
-            val obj = in.readObject()
-            Try(obj.asInstanceOf[A]).toOption
-          }
+      .use({ case (byte, in) =>
+        S.delay {
+          val obj = in.readObject()
+          Try(obj.asInstanceOf[A]).toOption
+        }
       })
   }
 }
@@ -161,9 +159,8 @@ case class RedisCache[F[_]: Sync](host: String) extends DataCache[F] {
       M: Monad[F]
   ): F[DataCache[F]] =
     for {
-      bin <- vs.traverse({
-        case (id, v) =>
-          Binary.serialize(v).tupleRight(cacheId(id, data))
+      bin <- vs.traverse({ case (id, v) =>
+        Binary.serialize(v).tupleRight(cacheId(id, data))
       })
       _ <- Sync[F].delay(bulkSet(bin))
     } yield this
7 changes: 3 additions & 4 deletions fetch-examples/src/test/scala/MonixExample.scala
@@ -40,10 +40,9 @@ class MonixExample extends AnyWordSpec with Matchers {
       val task = Fetch.runLog[Task](fetch)

       task
-        .map({
-          case (log, result) =>
-            result shouldEqual Author(1, "William Shakespeare")
-            log.rounds.size shouldEqual 1
+        .map({ case (log, result) =>
+          result shouldEqual Author(1, "William Shakespeare")
+          log.rounds.size shouldEqual 1
         })
         .runToFuture
     }
5 changes: 2 additions & 3 deletions fetch/src/main/scala/cache.scala
@@ -36,9 +36,8 @@ trait DataCache[F[_]] {
   def bulkInsert[I, A](vs: List[(I, A)], data: Data[I, A])(implicit
       M: Monad[F]
   ): F[DataCache[F]] = {
-    vs.foldLeftM(this) {
-      case (acc, (i, v)) =>
-        acc.insert(i, v, data)
+    vs.foldLeftM(this) { case (acc, (i, v)) =>
+      acc.insert(i, v, data)
     }
   }
 }
28 changes: 13 additions & 15 deletions fetch/src/main/scala/fetch.scala
Expand Up @@ -227,17 +227,15 @@ object `package` {
/* Combine two `RequestMap` instances to batch requests to the same data source. */
private def combineRequestMaps[F[_]: Monad](x: RequestMap[F], y: RequestMap[F]): RequestMap[F] =
RequestMap(
x.m.foldLeft(y.m) {
case (acc, (dsId, (ds, blocked))) =>
val combined = acc
.get(dsId)
.fold(
(ds, blocked)
)({
case (d, req) =>
(d, combineRequests(blocked, req))
})
acc.updated(dsId, combined)
x.m.foldLeft(y.m) { case (acc, (dsId, (ds, blocked))) =>
val combined = acc
.get(dsId)
.fold(
(ds, blocked)
)({ case (d, req) =>
(d, combineRequests(blocked, req))
})
acc.updated(dsId, combined)
}
)

@@ -581,8 +579,8 @@ object `package` {
       requests <- FetchExecution.parallel(
         NonEmptyList
           .fromListUnsafe(blocked)
-          .map({
-            case (ds, req) => runBlockedRequest(req, ds, cache, log)
+          .map({ case (ds, req) =>
+            runBlockedRequest(req, ds, cache, log)
           })
       )
       performedRequests = requests.foldLeft(List.empty[Request])(_ ++ _)
@@ -672,8 +670,8 @@ object `package` {

       // Remove cached IDs
       idLookups <- q.ids.traverse[F, (Any, Option[Any])]((i) => c.lookup(i, q.data).tupleLeft(i))
-      (uncachedIds, cached) = idLookups.toList.partitionEither {
-        case (i, result) => result.tupleLeft(i).toRight(i)
+      (uncachedIds, cached) = idLookups.toList.partitionEither { case (i, result) =>
+        result.tupleLeft(i).toRight(i)
       }
       cachedResults = cached.toMap
       result <- uncachedIds.toNel match {
71 changes: 32 additions & 39 deletions fetch/src/test/scala/FetchBatchingTests.scala
@@ -109,12 +109,11 @@ class FetchBatchingTests extends FetchSpec {

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3, 4, 5)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 5
-          totalBatches(log.rounds) shouldEqual 3
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3, 4, 5)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 5
+        totalBatches(log.rounds) shouldEqual 3
       }).unsafeToFuture
     }

@@ -124,12 +123,11 @@

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3, 4, 5)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 5
-          totalBatches(log.rounds) shouldEqual 3
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3, 4, 5)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 5
+        totalBatches(log.rounds) shouldEqual 3
       }).unsafeToFuture
     }

@@ -140,12 +138,11 @@

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3, 4, 5)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 5 + 5
-          totalBatches(log.rounds) shouldEqual 3 + 3
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3, 4, 5)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 5 + 5
+        totalBatches(log.rounds) shouldEqual 3 + 3
       }).unsafeToFuture
     }

@@ -156,12 +153,11 @@

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3, 4, 5)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 5 + 5
-          totalBatches(log.rounds) shouldEqual 3 + 3
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3, 4, 5)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 5 + 5
+        totalBatches(log.rounds) shouldEqual 3 + 3
       }).unsafeToFuture
     }

@@ -171,12 +167,11 @@

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 3
-          totalBatches(log.rounds) shouldEqual 2
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 3
+        totalBatches(log.rounds) shouldEqual 2
       }).unsafeToFuture
     }

@@ -186,12 +181,11 @@

       val io = Fetch.runLog[IO](fetch)

-      io.map({
-        case (log, result) =>
-          result shouldEqual List(1, 2, 3)
-          log.rounds.size shouldEqual 1
-          totalFetched(log.rounds) shouldEqual 3
-          totalBatches(log.rounds) shouldEqual 2
+      io.map({ case (log, result) =>
+        result shouldEqual List(1, 2, 3)
+        log.rounds.size shouldEqual 1
+        totalFetched(log.rounds) shouldEqual 3
+        totalBatches(log.rounds) shouldEqual 2
       }).unsafeToFuture
     }

@@ -209,9 +203,8 @@
         ids.toList.traverse(fetchBatchedDataBigId[IO])
       )

-      io.map({
-        case (log, result) =>
-          result shouldEqual ids.map(_.toString)
+      io.map({ case (log, result) =>
+        result shouldEqual ids.map(_.toString)
       }).unsafeToFuture
     }
 }