| Summary: | Crash in NFS | ||
|---|---|---|---|
| Product: | [Community] GlusterFS | Reporter: | Anand Avati <aavati> |
| Component: | nfs | Assignee: | Shehjar Tikoo <shehjart> |
| Status: | CLOSED CURRENTRELEASE | QA Contact: | |
| Severity: | medium | Docs Contact: | |
| Priority: | low | ||
| Version: | mainline | CC: | chrisw, gluster-bugs, raveenpl, saurabh, vbhat, vijaykumar |
| Target Milestone: | --- | ||
| Target Release: | --- | ||
| Hardware: | All | ||
| OS: | Linux | ||
| Whiteboard: | |||
| Fixed In Version: | Doc Type: | Bug Fix | |
| Doc Text: | Story Points: | --- | |
| Clone Of: | Environment: | ||
| Last Closed: | Type: | --- | |
| Regression: | RTP | Mount Type: | nfs |
| Documentation: | DNR | CRM: | |
| Verified Versions: | Category: | --- | |
| oVirt Team: | --- | RHEL 7.3 requirements from Atomic Host: | |
| Cloudforms Team: | --- | Target Upstream Version: | |
|
Description
Anand Avati
2011-03-01 18:23:17 UTC
PATCH: http://patches.gluster.com/patch/6319 in master (rpcsvc: safe mem_put of req in nfs_rpcsvc_submit_vectors). Core was generated by `/opt/glusterfs/sbin/glusterfsd -f /etc/glusterfs/nfs.vol --log-file=/var/log/gl'.
Program terminated with signal 11, Segmentation fault.
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
1607 if ((rpcsvc_request_accepted (req)) &&
(gdb) bt
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
---Type <return> to continue, or q <return> to quit---q
poll_in=1Quit
(gdb) fr 0
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
1607 if ((rpcsvc_request_accepted (req)) &&
(gdb) l
1602 * references for the actors to which the request was handed plus one
1603 * reference maintained by the RPC layer. By unrefing for a case where
1604 * no actor was called, we will be losing the ref held for the RPC
1605 * layer.
1606 */
1607 if ((rpcsvc_request_accepted (req)) &&
1608 (rpcsvc_request_accepted_success (req)))
1609 rpcsvc_conn_unref (conn);
1610
1611 return ret;
(gdb) bt
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
---Type <return> to continue, or q <return> to quit---q
poll_in=1Quit
(gdb) fr 0
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
1607 if ((rpcsvc_request_accepted (req)) &&
(gdb) up
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
2715 return rpcsvc_submit_generic (req, msgvec, msg);
(gdb) down
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
1607 if ((rpcsvc_request_accepted (req)) &&
(gdb) bt
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
---Type <return> to continue, or q <return> to quit---q
poll_in=1Quit
(gdb) l
1602 * references for the actors to which the request was handed plus one
1603 * reference maintained by the RPC layer. By unrefing for a case where
1604 * no actor was called, we will be losing the ref held for the RPC
1605 * layer.
1606 */
1607 if ((rpcsvc_request_accepted (req)) &&
1608 (rpcsvc_request_accepted_success (req)))
1609 rpcsvc_conn_unref (conn);
1610
1611 return ret;
(gdb) fr 0
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
1607 if ((rpcsvc_request_accepted (req)) &&
(gdb) by
Undefined command: "by". Try "help".
(gdb) bt
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
---Type <return> to continue, or q <return> to quit---w
poll_in=1, poll_out=0, poll_err=0) at socket.c:831
#14 0x00002b6cc4c1c2b9 in event_dispatch_epoll_handler (event_pool=0x10fd9340,
events=0x2aaaac009dc0, i=0) at event.c:804
#15 0x00002b6cc4c1c48e in event_dispatch_epoll (event_pool=0x10fd9340) at event.c:867
#16 0x00002b6cc4c1c7a4 in event_dispatch (event_pool=0x10fd9340) at event.c:975
#17 0x0000000000406344 in main (argc=4, argv=0x7fff417f5f58) at glusterfsd.c:1494
(gdb) fr 3
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
1576 nfs3svc_submit_reply (req, (void *)&res,
(gdb) bt full
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
ret = 0
replyiob = 0x11383ae8
recordhdr = {iov_base = 0x2aaac52a0000, iov_len = 28}
conn = 0x112737f0
__FUNCTION__ = "rpcsvc_submit_generic"
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
No locals.
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
outmsg = {iov_base = 0x2aaac5240000, iov_len = 136}
iob = 0x11383a70
ret = -1
__FUNCTION__ = "nfs3svc_submit_reply"
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
res = {status = NFS3_OK, write3res_u = {resok = {file_wcc = {before = {
attributes_follow = 1, pre_op_attr_u = {attributes = {
size = 268435456000, mtime = {seconds = 1298486400, nseconds = 0},
ctime = {seconds = 1298486400, nseconds = 0}}}}, after = {
attributes_follow = 1, post_op_attr_u = {attributes = {type = NF3REG,
mode = 384, nlink = 1, uid = 0, gid = 0, size = 268435456000,
used = 258549837312, rdev = {specdata1 = 0, specdata2 = 0}, fsid = 5,
fileid = 1342177357, atime = {seconds = 1298486395, nseconds = 0},
mtime = {seconds = 1298486400, nseconds = 0}, ctime = {
seconds = 1298486400, nseconds = 0}}}}}, count = 65536,
committed = FILE_SYNC, verf = "T\020eM\000\000\000"}, resfail = {file_wcc = {
---Type <return> to continue, or q <return> to quit---
before = {attributes_follow = 1, pre_op_attr_u = {attributes = {
size = 268435456000, mtime = {seconds = 1298486400, nseconds = 0},
ctime = {seconds = 1298486400, nseconds = 0}}}}, after = {
attributes_follow = 1, post_op_attr_u = {attributes = {type = NF3REG,
mode = 384, nlink = 1, uid = 0, gid = 0, size = 268435456000,
used = 258549837312, rdev = {specdata1 = 0, specdata2 = 0}, fsid = 5,
fileid = 1342177357, atime = {seconds = 1298486395, nseconds = 0},
mtime = {seconds = 1298486400, nseconds = 0}, ctime = {
seconds = 1298486400, nseconds = 0}}}}}}}}
xlid = 5
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
nfs3 = 0x2aaaac00bad0
stat = NFS3_OK
cs = 0x2aaac00e0140
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
nfl = 0x2aaac008e550
progcbk = 0x2b6cc5b22c8e <nfs3svc_write_fsync_cbk>
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
fn = 0x2b6cc5b16c56 <nfs_fop_fsync_cbk>
_parent = 0x2aaac0094298
old_THIS = 0x10feab30
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
fn = 0x2b6cc58f672a <iot_fsync_cbk>
---Type <return> to continue, or q <return> to quit---
_parent = 0x2aaac0094320
old_THIS = 0x10fe38b0
prestat = {ia_ino = 1342177357, ia_gen = 0, ia_dev = 0, ia_type = IA_IFREG,
ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {
read = 1 '\001', write = 1 '\001', exec = 0 '\000'}, group = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, other = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}}, ia_nlink = 1,
ia_uid = 0, ia_gid = 0, ia_rdev = 0, ia_size = 268435456000, ia_blksize = 131072,
ia_blocks = 504980151, ia_atime = 1298486395, ia_atime_nsec = 501149067,
ia_mtime = 1298486400, ia_mtime_nsec = 705963094, ia_ctime = 1298486400,
ia_ctime_nsec = 705963094}
poststat = {ia_ino = 1342177357, ia_gen = 0, ia_dev = 5, ia_type = IA_IFREG,
ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {
read = 1 '\001', write = 1 '\001', exec = 0 '\000'}, group = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, other = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}}, ia_nlink = 1,
ia_uid = 0, ia_gid = 0, ia_rdev = 0, ia_size = 268435456000, ia_blksize = 131072,
ia_blocks = 504980151, ia_atime = 1298486395, ia_atime_nsec = 501149067,
ia_mtime = 1298486400, ia_mtime_nsec = 705963094, ia_ctime = 1298486400,
ia_ctime_nsec = 705963094}
rsp = 0x2aaac003e12c
op_ret = 0
op_errno = 0
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
ret = -1
frame = 0x2aaab42b79f0
hdr = 0x2aaac003e0c0
callid = 762459
type = 4
---Type <return> to continue, or q <return> to quit---
op = 15
conn = 0x2aaaac004430
__FUNCTION__ = "protocol_client_interpret"
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
conf = 0x2aaaac003f90
ret = 0
iobuf = 0x0
hdr = 0x2aaac003e0c0 ""
hdrlen = 268
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
i = 0
ret = -1
child_down = 1
was_not_down = 0
trans = 0x2aaaac004010
conn = 0x0
conf = 0x2aaaac003f90
parent = 0x0
__FUNCTION__ = "notify"
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
old_THIS = 0x2b6cc4e49640
ret = 0
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
ret = 0
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
poll_in=1, poll_out=0, poll_err=0) at socket.c:831
this = 0x2aaaac004010
---Type <return> to continue, or q <return> to quit---
priv = 0x2aaaac004360
ret = 0
#14 0x00002b6cc4c1c2b9 in event_dispatch_epoll_handler (event_pool=0x10fd9340,
events=0x2aaaac009dc0, i=0) at event.c:804
event_data = 0x2aaaac009dc4
handler = 0x2aaaaaf0a0d4 <socket_event_handler>
data = 0x2aaaac004010
idx = 10
ret = -1
__FUNCTION__ = "event_dispatch_epoll_handler"
#15 0x00002b6cc4c1c48e in event_dispatch_epoll (event_pool=0x10fd9340) at event.c:867
events = 0x2aaaac009dc0
size = 1
i = 0
ret = 1
__FUNCTION__ = "event_dispatch_epoll"
#16 0x00002b6cc4c1c7a4 in event_dispatch (event_pool=0x10fd9340) at event.c:975
ret = -1
__FUNCTION__ = "event_dispatch"
#17 0x0000000000406344 in main (argc=4, argv=0x7fff417f5f58) at glusterfsd.c:1494
ctx = 0x10fd9010
cmd_args = 0x10fd9010
pool = 0x10fda8b0
stbuf = {st_dev = 15774436, st_ino = 194, st_nlink = 5393833408,
st_mode = 3303387512, st_uid = 11116, st_gid = 3300766560, pad0 = 11116,
st_rdev = 140734292254416, st_size = 47746157228032, st_blksize = 4199511,
st_blocks = 227633266916, st_atim = {tv_sec = 6442450944, tv_nsec = 0},
st_mtim = {tv_sec = 0, tv_nsec = 0}, st_ctim = {tv_sec = 15774436,
tv_nsec = 194}, __unused = {15774436, 140734292254278, 140734292254279}}
tmp_logfile = '\000' <repeats 1023 times>
---Type <return> to continue, or q <return> to quit---
tmp_logfile_dyn = 0x0
tmp_logfilebase = 0x0
timestr = '\000' <repeats 255 times>
utime = 1298468948
tm = 0x0
ret = 0
lim = {rlim_cur = 18446744073709551615, rlim_max = 18446744073709551615}
specfp = 0x10fdab20
graph = 0x10fed7f0
trav = 0x0
fuse_volume_found = 0
xl_count = 19
process_mode = 0 '\000'
pipe_fd = {6, 7}
gf_success = 0
gf_failure = -1
__FUNCTION__ = "main"
(gdb) bt full
#0 0x00002b6cc5d56f5c in rpcsvc_submit_generic (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:1607
ret = 0
replyiob = 0x11383ae8
recordhdr = {iov_base = 0x2aaac52a0000, iov_len = 28}
conn = 0x112737f0
__FUNCTION__ = "rpcsvc_submit_generic"
#1 0x00002b6cc5d595be in rpcsvc_submit_message (req=0x1143b300, msgvec=..., msg=0x11383a70)
at rpcsvc.c:2715
No locals.
#2 0x00002b6cc5b1f38b in nfs3svc_submit_reply (req=0x1143b300, arg=0x7fff417f5090,
sfunc=0x2b6cc5d4dba1 <xdr_serialize_write3res>) at nfs3.c:308
outmsg = {iov_base = 0x2aaac5240000, iov_len = 136}
iob = 0x11383a70
ret = -1
__FUNCTION__ = "nfs3svc_submit_reply"
#3 0x00002b6cc5b22c87 in nfs3_write_reply (req=0x1143b300, stat=NFS3_OK, count=65536,
stable=FILE_SYNC, wverf=1298468948, prestat=0x2aaac00e0310, poststat=0x7fff417f5280)
at nfs3.c:1576
res = {status = NFS3_OK, write3res_u = {resok = {file_wcc = {before = {
attributes_follow = 1, pre_op_attr_u = {attributes = {
size = 268435456000, mtime = {seconds = 1298486400, nseconds = 0},
ctime = {seconds = 1298486400, nseconds = 0}}}}, after = {
attributes_follow = 1, post_op_attr_u = {attributes = {type = NF3REG,
mode = 384, nlink = 1, uid = 0, gid = 0, size = 268435456000,
used = 258549837312, rdev = {specdata1 = 0, specdata2 = 0}, fsid = 5,
fileid = 1342177357, atime = {seconds = 1298486395, nseconds = 0},
mtime = {seconds = 1298486400, nseconds = 0}, ctime = {
seconds = 1298486400, nseconds = 0}}}}}, count = 65536,
committed = FILE_SYNC, verf = "T\020eM\000\000\000"}, resfail = {file_wcc = {
---Type <return> to continue, or q <return> to quit---
before = {attributes_follow = 1, pre_op_attr_u = {attributes = {
size = 268435456000, mtime = {seconds = 1298486400, nseconds = 0},
ctime = {seconds = 1298486400, nseconds = 0}}}}, after = {
attributes_follow = 1, post_op_attr_u = {attributes = {type = NF3REG,
mode = 384, nlink = 1, uid = 0, gid = 0, size = 268435456000,
used = 258549837312, rdev = {specdata1 = 0, specdata2 = 0}, fsid = 5,
fileid = 1342177357, atime = {seconds = 1298486395, nseconds = 0},
mtime = {seconds = 1298486400, nseconds = 0}, ctime = {
seconds = 1298486400, nseconds = 0}}}}}}}}
xlid = 5
#4 0x00002b6cc5b22d86 in nfs3svc_write_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs3.c:1601
nfs3 = 0x2aaaac00bad0
stat = NFS3_OK
cs = 0x2aaac00e0140
#5 0x00002b6cc5b16d43 in nfs_fop_fsync_cbk (frame=0x2aaac0094298, cookie=0x10fed7f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at nfs-fops.c:1172
nfl = 0x2aaac008e550
progcbk = 0x2b6cc5b22c8e <nfs3svc_write_fsync_cbk>
#6 0x00002b6cc58f6820 in iot_fsync_cbk (frame=0x2aaac0094320, cookie=0x2aaab42b79f0,
this=0x10feab30, op_ret=0, op_errno=0, prebuf=0x7fff417f52f0, postbuf=0x7fff417f5280)
at io-threads.c:893
fn = 0x2b6cc5b16c56 <nfs_fop_fsync_cbk>
_parent = 0x2aaac0094298
old_THIS = 0x10feab30
#7 0x00002b6cc56e0019 in client_fsync_cbk (frame=0x2aaab42b79f0, hdr=0x2aaac003e0c0,
hdrlen=268, iobuf=0x0) at client-protocol.c:4324
fn = 0x2b6cc58f672a <iot_fsync_cbk>
---Type <return> to continue, or q <return> to quit---
_parent = 0x2aaac0094320
old_THIS = 0x10fe38b0
prestat = {ia_ino = 1342177357, ia_gen = 0, ia_dev = 0, ia_type = IA_IFREG,
ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {
read = 1 '\001', write = 1 '\001', exec = 0 '\000'}, group = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, other = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}}, ia_nlink = 1,
ia_uid = 0, ia_gid = 0, ia_rdev = 0, ia_size = 268435456000, ia_blksize = 131072,
ia_blocks = 504980151, ia_atime = 1298486395, ia_atime_nsec = 501149067,
ia_mtime = 1298486400, ia_mtime_nsec = 705963094, ia_ctime = 1298486400,
ia_ctime_nsec = 705963094}
poststat = {ia_ino = 1342177357, ia_gen = 0, ia_dev = 5, ia_type = IA_IFREG,
ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {
read = 1 '\001', write = 1 '\001', exec = 0 '\000'}, group = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, other = {
read = 0 '\000', write = 0 '\000', exec = 0 '\000'}}, ia_nlink = 1,
ia_uid = 0, ia_gid = 0, ia_rdev = 0, ia_size = 268435456000, ia_blksize = 131072,
ia_blocks = 504980151, ia_atime = 1298486395, ia_atime_nsec = 501149067,
ia_mtime = 1298486400, ia_mtime_nsec = 705963094, ia_ctime = 1298486400,
ia_ctime_nsec = 705963094}
rsp = 0x2aaac003e12c
op_ret = 0
op_errno = 0
#8 0x00002b6cc56e5af8 in protocol_client_interpret (this=0x10fe38b0, trans=0x2aaaac004010,
hdr_p=0x2aaac003e0c0 "", hdrlen=268, iobuf=0x0) at client-protocol.c:6137
ret = -1
frame = 0x2aaab42b79f0
hdr = 0x2aaac003e0c0
callid = 762459
type = 4
---Type <return> to continue, or q <return> to quit---
op = 15
conn = 0x2aaaac004430
__FUNCTION__ = "protocol_client_interpret"
#9 0x00002b6cc56e67be in protocol_client_pollin (this=0x10fe38b0, trans=0x2aaaac004010)
at client-protocol.c:6435
conf = 0x2aaaac003f90
ret = 0
iobuf = 0x0
hdr = 0x2aaac003e0c0 ""
hdrlen = 268
#10 0x00002b6cc56e6e35 in notify (this=0x10fe38b0, event=2, data=0x2aaaac004010)
at client-protocol.c:6554
i = 0
ret = -1
child_down = 1
was_not_down = 0
trans = 0x2aaaac004010
conn = 0x0
conf = 0x2aaaac003f90
parent = 0x0
__FUNCTION__ = "notify"
#11 0x00002b6cc4bf5b7c in xlator_notify (xl=0x10fe38b0, event=2, data=0x2aaaac004010)
at xlator.c:919
old_THIS = 0x2b6cc4e49640
ret = 0
#12 0x00002aaaaaf09e96 in socket_event_poll_in (this=0x2aaaac004010) at socket.c:731
ret = 0
#13 0x00002aaaaaf0a18b in socket_event_handler (fd=18, idx=10, data=0x2aaaac004010,
poll_in=1, poll_out=0, poll_err=0) at socket.c:831
this = 0x2aaaac004010
---Type <return> to continue, or q <return> to quit---
priv = 0x2aaaac004360
ret = 0
#14 0x00002b6cc4c1c2b9 in event_dispatch_epoll_handler (event_pool=0x10fd9340,
events=0x2aaaac009dc0, i=0) at event.c:804
event_data = 0x2aaaac009dc4
handler = 0x2aaaaaf0a0d4 <socket_event_handler>
data = 0x2aaaac004010
idx = 10
ret = -1
__FUNCTION__ = "event_dispatch_epoll_handler"
#15 0x00002b6cc4c1c48e in event_dispatch_epoll (event_pool=0x10fd9340) at event.c:867
events = 0x2aaaac009dc0
size = 1
i = 0
ret = 1
__FUNCTION__ = "event_dispatch_epoll"
#16 0x00002b6cc4c1c7a4 in event_dispatch (event_pool=0x10fd9340) at event.c:975
ret = -1
__FUNCTION__ = "event_dispatch"
#17 0x0000000000406344 in main (argc=4, argv=0x7fff417f5f58) at glusterfsd.c:1494
ctx = 0x10fd9010
cmd_args = 0x10fd9010
pool = 0x10fda8b0
stbuf = {st_dev = 15774436, st_ino = 194, st_nlink = 5393833408,
st_mode = 3303387512, st_uid = 11116, st_gid = 3300766560, pad0 = 11116,
st_rdev = 140734292254416, st_size = 47746157228032, st_blksize = 4199511,
st_blocks = 227633266916, st_atim = {tv_sec = 6442450944, tv_nsec = 0},
st_mtim = {tv_sec = 0, tv_nsec = 0}, st_ctim = {tv_sec = 15774436,
tv_nsec = 194}, __unused = {15774436, 140734292254278, 140734292254279}}
tmp_logfile = '\000' <repeats 1023 times>
---Type <return> to continue, or q <return> to quit---
Memory corruption being a corner case that is difficult to test, this bug is being left in its current state and will be reopened if the issue occurs again. *** Bug 2438 has been marked as a duplicate of this bug. *** As discussed with Avati, it's hard to reproduce. |