[DEBUG] 2020-06-13 23:34:23,894 method:org.apache.htrace.core.Tracer$Builder.loadSamplers(Tracer.java:106)
sampler.classes = ; loaded no samplers
[TRACE] 2020-06-13 23:34:24,131 method:org.apache.htrace.core.TracerId.<init>(TracerId.java:134)
ProcessID(fmt=%{tname}/%{ip}): computed process ID of "FSClient/192.168.50.66"
[TRACE] 2020-06-13 23:34:24,133 method:org.apache.htrace.core.TracerPool.addTracer(TracerPool.java:264)
TracerPool(Global): adding tracer Tracer(FSClient/192.168.50.66)
[DEBUG] 2020-06-13 23:34:24,133 method:org.apache.htrace.core.Tracer$Builder.loadSamplers(Tracer.java:106)
sampler.classes = ; loaded no samplers
[TRACE] 2020-06-13 23:34:24,131 method:org.apache.htrace.core.TracerId.<init>(TracerId.java:134)
ProcessID(fmt=%{tname}/%{ip}): computed process ID of "FSClient/192.168.50.66"
[TRACE] 2020-06-13 23:34:24,133 method:org.apache.htrace.core.TracerPool.addTracer(TracerPool.java:264)
TracerPool(Global): adding tracer Tracer(FSClient/192.168.50.66)
[DEBUG] 2020-06-13 23:34:24,133 method:org.apache.htrace.core.Tracer$Builder.loadSpanReceivers(Tracer.java:128)
span.receiver.classes = ; loaded no span receivers
[TRACE] 2020-06-13 23:34:24,133 method:org.apache.htrace.core.Tracer$Builder.build(Tracer.java:165)
Created Tracer(FSClient/192.168.50.66) for FSClient
[DEBUG] 2020-06-13 23:34:25,263 method:org.apache.hadoop.fs.RawLocalFileSystem.mkOneDirWithMode(RawLocalFileSystem.java:538)
NativeIO.createDirectoryWithMode error, path = \tmp\hadoop\mapred\staging\hadoop45363004.staging\job_local45363004_0001, mode = 755
183: 当文件已存在时,无法创建该文件。
at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Native Method)
at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode(NativeIO.java:560)
at org.apache.hadoop.fs.RawLocalFileSystem.mkOneDirWithMode(RawLocalFileSystem.java:534)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirsWithOptionalPermission(RawLocalFileSystem.java:587)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:559)
at org.apache.hadoop.fs.ChecksumFileSystem.mkdirs(ChecksumFileSystem.java:705)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:456)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:443)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1118)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1098)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:987)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:975)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:652)
at org.apache.hadoop.mapreduce.split.JobSplitWriter.createFile(JobSplitWriter.java:102)
at org.apache.hadoop.mapreduce.split.JobSplitWriter.createSplitFiles(JobSplitWriter.java:78)
at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:316)
at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:327)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:200)
at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1570)
at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1567)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1567)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1588)
at com.xinke.bigdata.hadoop.mr.wc.WordCountApp.main(WordCountApp.java:50)
[DEBUG] 2020-06-13 23:34:25,266 method:org.apache.hadoop.fs.RawLocalFileSystem.mkOneDirWithMode(RawLocalFileSystem.java:538)
NativeIO.createDirectoryWithMode error, path = \tmp\hadoop\mapred\staging\hadoop45363004.staging\job_local45363004_0001, mode = 755
183: 当文件已存在时,无法创建该文件。`