File tree Expand file tree Collapse file tree 2 files changed +17
-1
lines changed
main/scala/com/acervera/osm4scala/spark
test/scala/com/acervera/osm4scala/spark Expand file tree Collapse file tree 2 files changed +17
-1
lines changed Original file line number Diff line number Diff line change @@ -80,7 +80,7 @@ object OsmPbfRowIterator {
80
80
81
81
private def populateInfo (info : Info ): InternalRow = InternalRow .fromSeq(infoSchema.fieldNames.map{
82
82
case FIELD_INFO_VERSION => info.version.getOrElse(null )
83
- case FIELD_INFO_TIMESTAMP => info.timestamp.map(inst => inst.toEpochMilli).getOrElse(null )
83
+ case FIELD_INFO_TIMESTAMP => info.timestamp.map(inst => inst.toEpochMilli * 1000 ).getOrElse(null )
84
84
case FIELD_INFO_CHANGESET => info.changeset.getOrElse(null )
85
85
case FIELD_INFO_USER_ID => info.userId.getOrElse(null )
86
86
case FIELD_INFO_USER_NAME => info.userName.map(UTF8String .fromString).orNull
Original file line number Diff line number Diff line change @@ -36,6 +36,7 @@ import org.scalatest.prop.TableDrivenPropertyChecks
36
36
import org .scalatest .wordspec .AnyWordSpec
37
37
38
38
import java .io .File
39
+ import java .sql .Timestamp
39
40
import scala .util .Random
40
41
41
42
object SourcesForTesting {
@@ -145,6 +146,21 @@ class OsmPbfFormatSpec extends AnyWordSpec with Matchers with SparkSessionBefore
145
146
node171946.getAs[Long ](" id" ) shouldBe 171946L
146
147
}
147
148
149
+ " read info" in {
150
+ // <node id="1699777711" version="2" timestamp="2018-03-26T07:24:26Z" lat="43.7402163" lon="7.4281505"/>
151
+ // Epoch 1522049066000 = Monday, March 26, 2018 7:24:26 AM
152
+ val node1699777711 = loadOsmPbf(spark, monacoPath)
153
+ .select(
154
+ col(" id" ),
155
+ col(" info.version" ) as " version" ,
156
+ col(" info.timestamp" ) as " timestamp"
157
+ ).filter(" id == 1699777711" ).collect()(0 )
158
+
159
+ node1699777711.getAs[Long ](" id" ) shouldBe 1699777711L
160
+ node1699777711.getAs[Integer ](" version" ) should be(2 )
161
+ node1699777711.getAs[Timestamp ](" timestamp" ).toInstant.toEpochMilli shouldBe 1522049066000L
162
+ }
163
+
148
164
" read null info" in {
149
165
val node171946 = loadOsmPbf(spark, madridPath).select(" id" , " info" ).filter(" id == 171946" ).collect()(0 )
150
166
node171946.getAs[Long ](" id" ) shouldBe 171946L
You can't perform that action at this time.
0 commit comments