Commit a96155d

[ADAM-1834] Add proper extensions for SAM/BAM/CRAM output formats.
Resolves #1834.
1 parent: d213820
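The fix is the same in all three output formats: instead of passing an empty extension to Hadoop's FileOutputFormat.getDefaultWorkFile(context, extension), each format now passes its own suffix, so every task's shard is written as part-r-NNNNN.sam/.bam/.cram instead of a bare part-r-NNNNN. A toy sketch of that naming convention follows; partFileName is a hypothetical stand-in for illustration, not Hadoop or ADAM API.

    // Toy illustration of the part-file naming affected by this commit;
    // partFileName is a hypothetical helper, not actual Hadoop code.
    object PartFileNaming {
      def partFileName(taskId: Int, extension: String): String =
        "part-r-%05d%s".format(taskId, extension)

      def main(args: Array[String]): Unit = {
        println(partFileName(0, ""))      // part-r-00000      (previous behavior)
        println(partFileName(0, ".bam"))  // part-r-00000.bam  (with this commit)
      }
    }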

File tree

5 files changed (+11 lines, -11 lines)


adam-core/src/main/scala/org/bdgenomics/adam/rdd/read/ADAMBAMOutputFormat.scala

Lines changed: 2 additions & 2 deletions
@@ -47,7 +47,7 @@ class ADAMBAMOutputFormat[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringBAMRecordWriter[K](getDefaultWorkFile(context, ""),
+    return new KeyIgnoringBAMRecordWriter[K](getDefaultWorkFile(context, ".bam"),
       header,
       true,
       context)
@@ -84,7 +84,7 @@ class ADAMBAMOutputFormatHeaderLess[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringBAMRecordWriter[K](getDefaultWorkFile(context, ""),
+    return new KeyIgnoringBAMRecordWriter[K](getDefaultWorkFile(context, ".bam"),
       header,
       false,
       context)

adam-core/src/main/scala/org/bdgenomics/adam/rdd/read/ADAMCRAMOutputFormat.scala

Lines changed: 2 additions & 2 deletions
@@ -47,7 +47,7 @@ class ADAMCRAMOutputFormat[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringCRAMRecordWriter[K](getDefaultWorkFile(context, ""),
+    return new KeyIgnoringCRAMRecordWriter[K](getDefaultWorkFile(context, ".cram"),
       header,
       true,
       context)
@@ -84,7 +84,7 @@ class ADAMCRAMOutputFormatHeaderLess[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringCRAMRecordWriter[K](getDefaultWorkFile(context, ""),
+    return new KeyIgnoringCRAMRecordWriter[K](getDefaultWorkFile(context, ".cram"),
       header,
       false,
       context)

adam-core/src/main/scala/org/bdgenomics/adam/rdd/read/ADAMSAMOutputFormat.scala

Lines changed: 2 additions & 2 deletions
@@ -44,7 +44,7 @@ class ADAMSAMOutputFormat[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringSAMRecordWriter(getDefaultWorkFile(context, ""),
+    return new KeyIgnoringSAMRecordWriter(getDefaultWorkFile(context, ".sam"),
       header,
       true,
       context)
@@ -71,7 +71,7 @@ class ADAMSAMOutputFormatHeaderLess[K]
     readSAMHeaderFrom(path, conf)

     // now that we have the header set, we need to make a record reader
-    return new KeyIgnoringSAMRecordWriter(getDefaultWorkFile(context, ""),
+    return new KeyIgnoringSAMRecordWriter(getDefaultWorkFile(context, ".sam"),
       header,
       false,
       context)

adam-core/src/test/scala/org/bdgenomics/adam/rdd/read/AlignmentRecordRDDSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -296,7 +296,7 @@ class AlignmentRecordRDDSuite extends ADAMFunSuite
     ardd.saveAsSam(tempFile.toAbsolutePath.toString + "/reads12.sam",
       asType = Some(SAMFormat.SAM))

-    val rdd12B = sc.loadBam(tempFile.toAbsolutePath.toString + "/reads12.sam/part-r-00000")
+    val rdd12B = sc.loadBam(tempFile.toAbsolutePath.toString + "/reads12.sam/part-r-00000.sam")

     assert(rdd12B.rdd.count() === rdd12A.rdd.count())

@@ -358,7 +358,7 @@ class AlignmentRecordRDDSuite extends ADAMFunSuite
       asSingleFile = false,
       isSorted = true)

-    val rddB = sc.loadBam(tempFile + "/part-r-00000")
+    val rddB = sc.loadBam(tempFile + "/part-r-00000.cram")

     assert(rddB.rdd.count() === rddA.rdd.count())

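For context, a minimal round-trip sketch of the pattern these tests exercise: save sharded SAM output, then load a single shard back under its new extension-bearing name. It assumes a live SparkContext `sc` with the ADAMContext implicits in scope; the import locations and paths are assumptions for illustration, not part of this commit.

    import org.apache.spark.SparkContext
    import org.bdgenomics.adam.rdd.ADAMContext._ // assumed home of the loadBam/saveAsSam implicits
    import org.seqdoop.hadoop_bam.SAMFormat      // assumed home of SAMFormat

    object ShardedRoundTrip {
      // Illustrative round trip, mirroring the tests above; `sc` and `outDir` are supplied by the caller.
      def run(sc: SparkContext, outDir: String): Unit = {
        val reads = sc.loadBam(outDir + "/input.bam") // hypothetical input path
        reads.saveAsSam(outDir + "/reads.sam",
          asType = Some(SAMFormat.SAM),
          asSingleFile = false)
        // Shards were previously named part-r-00000; they now carry the format extension.
        val shard = sc.loadBam(outDir + "/reads.sam/part-r-00000.sam")
        assert(shard.rdd.count() === reads.rdd.count())
      }
    }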

adam-core/src/test/scala/org/bdgenomics/adam/util/ParallelFileMergerSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -155,7 +155,7 @@ class ParallelFileMergerSuite extends ADAMFunSuite

     val fs = FileSystem.get(sc.hadoopConfiguration)
     val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
-      outPath + "_tail/part-r-0000%d".format(i)
+      outPath + "_tail/part-r-0000%d.sam".format(i)
     })).map(new Path(_))
       .map(p => (p.toString, 0L, fs.getFileStatus(p).getLen().toLong - 1L))

@@ -179,7 +179,7 @@ class ParallelFileMergerSuite extends ADAMFunSuite

     val fs = FileSystem.get(sc.hadoopConfiguration)
     val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
-      outPath + "_tail/part-r-0000%d".format(i)
+      outPath + "_tail/part-r-0000%d.bam".format(i)
     })).map(new Path(_))
       .map(p => (p.toString, 0L, fs.getFileStatus(p).getLen().toLong - 1L))

@@ -206,7 +206,7 @@ class ParallelFileMergerSuite extends ADAMFunSuite

     val fs = FileSystem.get(sc.hadoopConfiguration)
     val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
-      outPath + "_tail/part-r-0000%d".format(i)
+      outPath + "_tail/part-r-0000%d.cram".format(i)
     })).map(new Path(_))
       .map(p => (p.toString, 0L, fs.getFileStatus(p).getLen().toLong - 1L))
