Lines Matching refs:inode (each hit shows the source line number, the matched code, and the enclosing function)
30 static struct inode *qnx4_alloc_inode(struct super_block *sb);
31 static void qnx4_destroy_inode(struct inode *inode);
54 static int qnx4_get_block( struct inode *inode, sector_t iblock, struct buffer_head *bh, int create… in qnx4_get_block() argument
58 QNX4DEBUG((KERN_INFO "qnx4: qnx4_get_block inode=[%ld] iblock=[%ld]\n",inode->i_ino,iblock)); in qnx4_get_block()
60 phys = qnx4_block_map( inode, iblock ); in qnx4_get_block()
63 map_bh(bh, inode->i_sb, phys); in qnx4_get_block()
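The four matches above span the whole body of qnx4_get_block(). A minimal sketch of that get_block-style callback, reconstructed from the matched lines with the debug print dropped (qnx4 is read-only in mainline, so the create argument is never acted on; treat this as an illustration, not the verbatim source):

static int qnx4_get_block(struct inode *inode, sector_t iblock,
			  struct buffer_head *bh, int create)
{
	unsigned long phys;

	/* Translate the file-relative block number into an on-disk
	 * block number; 0 means "no mapping" (hole, EOF or error). */
	phys = qnx4_block_map(inode, iblock);
	if (phys)
		map_bh(bh, inode->i_sb, phys);	/* publish the mapping to the caller */
	return 0;
}

The qnx4_aops address_space operations seen further down presumably route their read paths through this callback via the generic block helpers.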
77 unsigned long qnx4_block_map( struct inode *inode, long iblock ) in qnx4_block_map() argument
83 struct qnx4_inode_entry *qnx4_inode = qnx4_raw_inode(inode); in qnx4_block_map()
97 bh = sb_bread(inode->i_sb, i_xblk - 1); in qnx4_block_map()
124 QNX4DEBUG((KERN_INFO "qnx4: mapping block %ld of inode %ld = %ld\n",iblock,inode->i_ino,block)); in qnx4_block_map()
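qnx4_block_map() walks the inode's extents; when the target block lies beyond the first extent it reads the extent block ("xblk") through the buffer cache, which is what the sb_bread() match at line 97 shows. A hedged sketch of just that access pattern, wrapped in a hypothetical helper (qnx4_read_xblk is not a function in the file; the "- 1" mirrors the matched line and suggests QNX4 stores 1-based block numbers on disk):

/* Hypothetical helper, for illustration only. */
static struct buffer_head *qnx4_read_xblk(struct super_block *sb, long i_xblk)
{
	/* sb_bread() takes a 0-based block number; the on-disk xblk value is 1-based. */
	return sb_bread(sb, i_xblk - 1);
}

The caller must check for a NULL return (I/O error) and brelse() the buffer_head once it has finished walking the extent table in bh->b_data.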
189 struct inode *root; in qnx4_fill_super()
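The lone match in qnx4_fill_super() is the root inode that every fill_super implementation must fetch and attach to the dentry tree. A hedged sketch of that tail end, assuming the QNX4_ROOT_INO and QNX4_INODES_PER_BLOCK constants from qnx4_fs.h and the generic d_make_root() helper; the variable names and the error label are illustrative, not copied from the file:

	root = qnx4_iget(sb, QNX4_ROOT_INO * QNX4_INODES_PER_BLOCK);
	if (IS_ERR(root)) {
		printk(KERN_ERR "qnx4: get inode failed\n");
		ret = PTR_ERR(root);
		goto outb;			/* release superblock buffers */
	}

	sb->s_root = d_make_root(root);		/* consumes the inode reference */
	if (!sb->s_root) {
		ret = -ENOMEM;
		goto outb;
	}
	return 0;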
260 struct inode *qnx4_iget(struct super_block *sb, unsigned long ino) in qnx4_iget()
266 struct inode *inode; in qnx4_iget() local
268 inode = iget_locked(sb, ino); in qnx4_iget()
269 if (!inode) in qnx4_iget()
271 if (!(inode->i_state & I_NEW)) in qnx4_iget()
272 return inode; in qnx4_iget()
274 qnx4_inode = qnx4_raw_inode(inode); in qnx4_iget()
275 inode->i_mode = 0; in qnx4_iget()
282 iget_failed(inode); in qnx4_iget()
290 iget_failed(inode); in qnx4_iget()
296 inode->i_mode = le16_to_cpu(raw_inode->di_mode); in qnx4_iget()
297 i_uid_write(inode, (uid_t)le16_to_cpu(raw_inode->di_uid)); in qnx4_iget()
298 i_gid_write(inode, (gid_t)le16_to_cpu(raw_inode->di_gid)); in qnx4_iget()
299 set_nlink(inode, le16_to_cpu(raw_inode->di_nlink)); in qnx4_iget()
300 inode->i_size = le32_to_cpu(raw_inode->di_size); in qnx4_iget()
301 inode->i_mtime.tv_sec = le32_to_cpu(raw_inode->di_mtime); in qnx4_iget()
302 inode->i_mtime.tv_nsec = 0; in qnx4_iget()
303 inode->i_atime.tv_sec = le32_to_cpu(raw_inode->di_atime); in qnx4_iget()
304 inode->i_atime.tv_nsec = 0; in qnx4_iget()
305 inode->i_ctime.tv_sec = le32_to_cpu(raw_inode->di_ctime); in qnx4_iget()
306 inode->i_ctime.tv_nsec = 0; in qnx4_iget()
307 inode->i_blocks = le32_to_cpu(raw_inode->di_first_xtnt.xtnt_size); in qnx4_iget()
310 if (S_ISREG(inode->i_mode)) { in qnx4_iget()
311 inode->i_fop = &generic_ro_fops; in qnx4_iget()
312 inode->i_mapping->a_ops = &qnx4_aops; in qnx4_iget()
313 qnx4_i(inode)->mmu_private = inode->i_size; in qnx4_iget()
314 } else if (S_ISDIR(inode->i_mode)) { in qnx4_iget()
315 inode->i_op = &qnx4_dir_inode_operations; in qnx4_iget()
316 inode->i_fop = &qnx4_dir_operations; in qnx4_iget()
317 } else if (S_ISLNK(inode->i_mode)) { in qnx4_iget()
318 inode->i_op = &page_symlink_inode_operations; in qnx4_iget()
319 inode->i_mapping->a_ops = &qnx4_aops; in qnx4_iget()
320 qnx4_i(inode)->mmu_private = inode->i_size; in qnx4_iget()
324 iget_failed(inode); in qnx4_iget()
329 unlock_new_inode(inode); in qnx4_iget()
330 return inode; in qnx4_iget()
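Lines 260-330 trace the standard iget_locked() protocol: look the inode up in the inode cache, return it immediately if it is already initialised, otherwise fill it from the on-disk qnx4_inode_entry and publish it with unlock_new_inode(), backing out with iget_failed() on any error. A condensed sketch of that skeleton, with the on-disk read elided:

struct inode *qnx4_iget(struct super_block *sb, unsigned long ino)
{
	struct inode *inode;

	inode = iget_locked(sb, ino);	/* cached inode, or a fresh one with I_NEW set */
	if (!inode)
		return ERR_PTR(-ENOMEM);
	if (!(inode->i_state & I_NEW))
		return inode;		/* already set up by an earlier iget */

	/* ... read the raw qnx4_inode_entry, copy mode, uid/gid, nlink, size,
	 *     timestamps and i_blocks into the VFS inode (lines 296-307), then
	 *     choose i_op/i_fop/a_ops from S_ISREG/S_ISDIR/S_ISLNK (311-320).
	 *     On any failure: iget_failed(inode); return ERR_PTR(-EIO); ... */

	unlock_new_inode(inode);	/* clears I_NEW and wakes up any waiters */
	return inode;
}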
335 static struct inode *qnx4_alloc_inode(struct super_block *sb) in qnx4_alloc_inode()
346 struct inode *inode = container_of(head, struct inode, i_rcu); in qnx4_i_callback() local
347 kmem_cache_free(qnx4_inode_cachep, qnx4_i(inode)); in qnx4_i_callback()
350 static void qnx4_destroy_inode(struct inode *inode) in qnx4_destroy_inode() argument
352 call_rcu(&inode->i_rcu, qnx4_i_callback); in qnx4_destroy_inode()
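The remaining matches are the per-inode allocation and destruction pair. qnx4_destroy_inode() cannot free its qnx4_inode_info right away, since lockless (RCU-walk) path lookup may still be inspecting the inode; it therefore defers the kmem_cache_free() to an RCU callback, which recovers the inode from the embedded i_rcu head via container_of(). A sketch assembled from the matched lines, assuming qnx4_inode_info embeds the VFS inode as vfs_inode:

static struct inode *qnx4_alloc_inode(struct super_block *sb)
{
	struct qnx4_inode_info *ei;

	ei = kmem_cache_alloc(qnx4_inode_cachep, GFP_KERNEL);
	if (!ei)
		return NULL;
	return &ei->vfs_inode;			/* the VFS only sees the embedded inode */
}

static void qnx4_i_callback(struct rcu_head *head)
{
	struct inode *inode = container_of(head, struct inode, i_rcu);

	/* An RCU grace period has elapsed since destroy_inode(), so no
	 * lockless walker can still hold a pointer to this inode. */
	kmem_cache_free(qnx4_inode_cachep, qnx4_i(inode));
}

static void qnx4_destroy_inode(struct inode *inode)
{
	call_rcu(&inode->i_rcu, qnx4_i_callback);
}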