UPSMF / knowledge-platform · Commit 49242b50
Authored 3 years ago by Jayaprakash n

Issue #SB-22862 feat: Collection CSV Hierarchy feature API - issue fixes

Parent: f528c398
No related merge requests found
Showing 8 changed files with 154 additions and 124 deletions:

content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala (+30 -32)
content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCConstants.scala (+2 -0)
content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala (+62 -70)
content-api/collection-csv-actors/src/test/resources/MaxDescriptionLength.csv (+13 -0)
content-api/collection-csv-actors/src/test/resources/MaxUnitNameLength.csv (+7 -0)
content-api/collection-csv-actors/src/test/resources/application.conf (+2 -0)
content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala (+36 -22)
content-api/content-service/conf/application.conf (+2 -0)
content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/manager/CollectionCSVManager.scala (+30 -32)

@@ -6,9 +6,9 @@ import org.apache.commons.io.ByteOrderMark
 import org.apache.commons.io.FileUtils.{deleteQuietly, touch}
 import org.sunbird.cloudstore.StorageService
 import org.sunbird.collectioncsv.util.CollectionTOCConstants
-import org.sunbird.collectioncsv.util.CollectionTOCConstants.{COLLECTION_TOC_ALLOWED_MIMETYPE, CONTENT_TYPE}
+import org.sunbird.collectioncsv.util.CollectionTOCConstants.COLLECTION_TOC_ALLOWED_MIMETYPE
 import org.sunbird.collectioncsv.util.CollectionTOCUtil.linkDIALCode
-import org.sunbird.collectioncsv.validator.CollectionCSVValidator.{allowedContentTypes, collectionNodeIdentifierHeader, collectionOutputTocHeaders, contentTypeToUnitTypeMapping, createCSVMandatoryHeaderCols, folderHierarchyHdrColumnsList, linkedContentHdrColumnsList, mappedTopicsHeader, maxFolderLevels}
+import org.sunbird.collectioncsv.validator.CollectionCSVValidator.{collectionNodeIdentifierHeader, collectionOutputTocHeaders, contentTypeToUnitTypeMapping, createCSVMandatoryHeaderCols, folderHierarchyHdrColumnsList, linkedContentHdrColumnsList, mappedTopicsHeader, maxFolderLevels}
 import org.sunbird.common.{JsonUtils, Platform}
 import org.sunbird.common.dto.{Request, Response}
 import org.sunbird.common.exception.{ClientException, ServerException}

@@ -57,7 +57,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
   }
   def updateCollection(collectionHierarchy: Map[String, AnyRef], csvRecords: util.List[CSVRecord], mode: String, linkedContentsDetails: List[Map[String, AnyRef]])(implicit oec: OntologyEngineContext, ec: ExecutionContext): Future[Response] = {
-    val folderInfoMap = scala.collection.mutable.Map.empty[String, AnyRef]
+    val folderInfoMap = scala.collection.mutable.LinkedHashMap.empty[String, AnyRef]
     //prepare Map(folderInfoMap) of each folder with its details from the csvRecords
     populateFolderInfoMap(folderInfoMap, csvRecords, mode)

@@ -198,17 +198,17 @@ object CollectionCSVManager extends CollectionInputFileReader {
       val nodeInfo = getNodeInfo(record, linkedContents, nodeDepth, nodeIndex)
       val appendedMap = {
-        if (nodeDepth == 1) nodesInfoMap ++ Map(nodeDepth + "." + nodeIndex -> nodeInfo) else nodesInfoMap ++ Map(parentDepthIndex + ":" + nodeDepth + "." + nodeIndex -> nodeInfo)
+        if (nodeDepth == 1) nodesInfoMap ++ Map(nodeDepth + "." + (if (nodeIndex < 10) "0" + nodeIndex else nodeIndex) -> nodeInfo) else nodesInfoMap ++ Map(parentDepthIndex + ":" + nodeDepth + "." + (if (nodeIndex < 10) "0" + nodeIndex else nodeIndex) -> nodeInfo)
       }
       val fetchedList = {
         if (record.contains(CollectionTOCConstants.CHILDREN))
           if (nodeDepth == 1)
-            prepareNodeInfo(collectionUnitType, record(CollectionTOCConstants.CHILDREN).asInstanceOf[List[Map[String, AnyRef]]], appendedMap, nodeDepth + "." + nodeIndex)
+            prepareNodeInfo(collectionUnitType, record(CollectionTOCConstants.CHILDREN).asInstanceOf[List[Map[String, AnyRef]]], appendedMap, nodeDepth + "." + (if (nodeIndex < 10) "0" + nodeIndex else nodeIndex))
           else
-            prepareNodeInfo(collectionUnitType, record(CollectionTOCConstants.CHILDREN).asInstanceOf[List[Map[String, AnyRef]]], appendedMap, parentDepthIndex + ":" + nodeDepth + "." + nodeIndex)
+            prepareNodeInfo(collectionUnitType, record(CollectionTOCConstants.CHILDREN).asInstanceOf[List[Map[String, AnyRef]]], appendedMap, parentDepthIndex + ":" + nodeDepth + "." + (if (nodeIndex < 10) "0" + nodeIndex else nodeIndex))
         else List(appendedMap)
       }
       fetchedList

@@ -250,12 +250,12 @@ object CollectionCSVManager extends CollectionInputFileReader {
         val nodeInfoMap = folderInfoMap(folderDataHashCode).asInstanceOf[scala.collection.mutable.Map[String, AnyRef]]
         if (nodeInfoMap.contains(CollectionTOCConstants.CHILDREN)) {
-          var childrenSet = nodeInfoMap(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]]
-          childrenSet ++= Set(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1)))
+          var childrenSet = nodeInfoMap(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]]
+          childrenSet ++= Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1)))
           nodeInfoMap(CollectionTOCConstants.CHILDREN) = childrenSet
         } else {
-          val childrenList = Set(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1)))
+          val childrenList = Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1)))
           nodeInfoMap += (CollectionTOCConstants.CHILDREN -> childrenList)
         }
         folderInfoMap(folderDataHashCode) = nodeInfoMap

@@ -266,7 +266,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
       if (mode.equals(CollectionTOCConstants.UPDATE)) {
         val keywordsList = csvRecord.toMap.asScala.toMap.map(colData => {
           if (CollectionTOCConstants.KEYWORDS.equalsIgnoreCase(colData._1) && colData._2.nonEmpty)
-            colData._2.trim.split(",").toList.map(x => x.trim)
+            colData._2.trim.split(",").toList.filter(x => x.trim.nonEmpty)
           else List.empty
         }).filter(msg => msg.nonEmpty).flatten.toList

@@ -281,9 +281,9 @@ object CollectionCSVManager extends CollectionInputFileReader {
         val dialCode = if (csvRecordMap(CollectionTOCConstants.QR_CODE).nonEmpty) csvRecordMap(CollectionTOCConstants.QR_CODE).trim else ""
-        val csvLinkedContentsList: Set[String] = csvRecord.toMap.asScala.toMap.map(colData => {
+        val csvLinkedContentsList: Seq[String] = csvRecord.toMap.asScala.toMap.map(colData => {
           if (linkedContentHdrColumnsList.contains(colData._1) && colData._2.nonEmpty) colData._2.trim.toLowerCase() else ""
-        }).filter(msg => msg.nonEmpty).toSet[String]
+        }).filter(msg => msg.nonEmpty).toSeq
         scala.collection.mutable.Map(CollectionTOCConstants.IDENTIFIER -> csvRecordMap(collectionNodeIdentifierHeader.head), CollectionTOCConstants.NAME -> folderData._2, CollectionTOCConstants.DESCRIPTION -> csvRecordMap("Description"), CollectionTOCConstants.KEYWORDS -> keywordsList, CollectionTOCConstants.TOPIC -> mappedTopicsList,

@@ -297,8 +297,8 @@ object CollectionCSVManager extends CollectionInputFileReader {
       else {
         val childrenList = {
-          if ((sortedFoldersDataKey.indexOf(folderData._1)+1) != sortedFoldersDataList.size) Set(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1))) else Set.empty[String]
+          if ((sortedFoldersDataKey.indexOf(folderData._1)+1) != sortedFoldersDataList.size) Seq(getCode(sortedFoldersDataList.get(sortedFoldersDataKey.indexOf(folderData._1)+1))) else Seq.empty[String]
         }
         scala.collection.mutable.Map(CollectionTOCConstants.NAME -> folderData._2, CollectionTOCConstants.CHILDREN -> childrenList, CollectionTOCConstants.LEVEL -> folderData._1)
       }

@@ -310,7 +310,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
     })
   }
-  private def getNodesMetadata(folderInfoMap: mutable.Map[String, AnyRef], mode: String, frameworkID: String, collectionType: String): String = {
+  private def getNodesMetadata(folderInfoMap: mutable.LinkedHashMap[String, AnyRef], mode: String, frameworkID: String, collectionType: String): String = {
     val collectionUnitType = contentTypeToUnitTypeMapping(collectionType)
     folderInfoMap.map(record => {
       val nodeInfo = record._2.asInstanceOf[scala.collection.mutable.Map[String, AnyRef]]

@@ -331,7 +331,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
     }).mkString(",")
   }
-  private def getHierarchyMetadata(folderInfoMap: mutable.Map[String, AnyRef], mode: String, linkedContentsDetails: List[Map[String, AnyRef]], collectionID: String, collectionName: String, collectionType: String): String = {
+  private def getHierarchyMetadata(folderInfoMap: mutable.LinkedHashMap[String, AnyRef], mode: String, linkedContentsDetails: List[Map[String, AnyRef]], collectionID: String, collectionName: String, collectionType: String): String = {
     val collectionUnitType = contentTypeToUnitTypeMapping(collectionType)
     val linkedContentsInfoMap: Map[String, Map[String, String]] = if (linkedContentsDetails.nonEmpty) {

@@ -343,8 +343,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
       }).toMap
     } else Map.empty[String, Map[String, String]]
-    val collectionL1NodeList = {
-      folderInfoMap.map(nodeData => {
+    val collectionL1NodeList = folderInfoMap.map(nodeData => {
       if (nodeData._2.asInstanceOf[scala.collection.mutable.Map[String, AnyRef]](CollectionTOCConstants.LEVEL)!= null && nodeData._2.asInstanceOf[scala.collection.mutable.Map[String, AnyRef]](CollectionTOCConstants.LEVEL).toString.equalsIgnoreCase(createCSVMandatoryHeaderCols.head)) {

@@ -353,41 +352,40 @@ object CollectionCSVManager extends CollectionInputFileReader {
         else nodeData._1
       }
       else ""
-    }).filter(node => node.nonEmpty).mkString("[\"", "\",\"", "\"]")
-    }
+    }).filter(node => node.nonEmpty).toList.distinct.mkString("[\"", "\",\"", "\"]")
     val hierarchyRootNode = s""""$collectionID": {"name":"$collectionName","collectionType":"$collectionType","root":true,"children":$collectionL1NodeList}"""
     val hierarchyChildNodesMetadata = folderInfoMap.map(record => {
       val nodeInfo = record._2.asInstanceOf[scala.collection.mutable.Map[String, AnyRef]]
       if (mode.equals(CollectionTOCConstants.CREATE)) {
-        s""""${record._1}": {"name": "${nodeInfo("name").toString}","root": false,"contentType": "$collectionUnitType", "children": ${if(nodeInfo.contains(CollectionTOCConstants.CHILDREN)) nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]].mkString("[\"","\",\"","\"]") else "[]"}}"""
+        s""""${record._1}": {"name": "${nodeInfo("name").toString}","root": false,"contentType": "$collectionUnitType", "children": ${if(nodeInfo.contains(CollectionTOCConstants.CHILDREN)) nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]].mkString("[\"","\",\"","\"]") else "[]"}}"""
       }
       else {
         val childrenFolders = {
-          if (nodeInfo.contains(CollectionTOCConstants.CHILDREN) && nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]].nonEmpty && nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]].nonEmpty) {
-            val allChildrenSet = nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]] ++ nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]]
+          if (nodeInfo.contains(CollectionTOCConstants.CHILDREN) && nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]].nonEmpty && nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]].nonEmpty) {
+            val allChildrenSet = nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]] ++ nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]]
            allChildrenSet.map(childFolder => {
              if (folderInfoMap.contains(childFolder)) folderInfoMap(childFolder).asInstanceOf[scala.collection.mutable.Map[String, AnyRef]](CollectionTOCConstants.IDENTIFIER).toString else childFolder
            }).mkString("[\"", "\",\"", "\"]")
          }
-          else if (nodeInfo.contains(CollectionTOCConstants.CHILDREN) && nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]].nonEmpty) nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Set[String]].map(childFolder => {
+          else if (nodeInfo.contains(CollectionTOCConstants.CHILDREN) && nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]].nonEmpty) nodeInfo(CollectionTOCConstants.CHILDREN).asInstanceOf[Seq[String]].map(childFolder => {
            folderInfoMap(childFolder).asInstanceOf[scala.collection.mutable.Map[String, AnyRef]](CollectionTOCConstants.IDENTIFIER).toString
          }).mkString("[\"", "\",\"", "\"]")
-          else if (nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]].nonEmpty) nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]].mkString("[\"", "\",\"", "\"]")
+          else if (nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]].nonEmpty) nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]].mkString("[\"", "\",\"", "\"]")
           else "[]"
         }
         val folderNodeHierarchy = s""""${nodeInfo(CollectionTOCConstants.IDENTIFIER).toString}": {"name": "${nodeInfo("name").toString}","root": false,"contentType": "$collectionUnitType", "children": $childrenFolders}"""
-        val contentsNode = if (nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]].nonEmpty && linkedContentsInfoMap.nonEmpty)
+        val contentsNode = if (nodeInfo.contains(CollectionTOCConstants.LINKED_CONTENT) && nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]].nonEmpty && linkedContentsInfoMap.nonEmpty)
         {
-          val LinkedContentInfo = nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Set[String]].map(contentId => {
+          val LinkedContentInfo = nodeInfo(CollectionTOCConstants.LINKED_CONTENT).asInstanceOf[Seq[String]].map(contentId => {
            val linkedContentDetails: Map[String, String] = linkedContentsInfoMap(contentId)
            s""""${linkedContentDetails(CollectionTOCConstants.IDENTIFIER)}": {"name": "${linkedContentDetails(CollectionTOCConstants.NAME)}","root": false,"contentType": "${linkedContentDetails(CollectionTOCConstants.CONTENT_TYPE)}", "children": []}"""
          }).mkString(",")

@@ -421,7 +419,7 @@ object CollectionCSVManager extends CollectionInputFileReader {
     updateHierarchyRequest
   }
-  private def linkDIALCodes(folderInfoMap: mutable.Map[String, AnyRef], channelID: String, collectionID: String)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Unit = {
+  private def linkDIALCodes(folderInfoMap: mutable.LinkedHashMap[String, AnyRef], channelID: String, collectionID: String)(implicit oec: OntologyEngineContext, ec: ExecutionContext): Unit = {
     //invoke DIAL code Linking
     val linkDIALCodeReqMap = folderInfoMap.map(record => {
       val nodeInfo = record._2.asInstanceOf[scala.collection.mutable.Map[String, AnyRef]]

(Part of this file's diff is collapsed on the page and not shown.)
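The recurring change in CollectionCSVManager.scala is that folderInfoMap becomes a mutable.LinkedHashMap, the children collections become Seq instead of Set, and the per-level node index is zero-padded, so folders and linked contents keep the order in which the CSV rows were read even once a level has ten or more entries. A minimal, self-contained sketch of that ordering point (the names below are illustrative only, not the manager's actual code):

// Illustrative sketch only; not part of the commit.
import scala.collection.mutable

object NodeKeyOrderingSketch extends App {
  // Plain "depth.index" keys lose CSV order past the ninth child: "1.10" sorts before "1.2" as a string.
  println(List("1.1", "1.2", "1.10").sorted)    // List(1.1, 1.10, 1.2)  -- not insertion order
  // Zero-padding the index keeps string comparison aligned with insertion order.
  println(List("1.01", "1.02", "1.10").sorted)  // List(1.01, 1.02, 1.10)

  // LinkedHashMap iterates in insertion order, unlike the hash-based default mutable.Map.
  val folderInfoMap = mutable.LinkedHashMap.empty[String, AnyRef]
  folderInfoMap += ("1.01" -> "5. Human Body")
  folderInfoMap += ("1.02" -> "5.1 Parts of Body")
  println(folderInfoMap.keys.mkString(", "))    // 1.01, 1.02 -- stable order

  // Seq keeps children in append order (and allows duplicates); the earlier Set did not guarantee order.
  val children: Seq[String] = Seq("unit-b", "unit-a") ++ Seq("unit-c")
  println(children)                             // List(unit-b, unit-a, unit-c)
}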
content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/util/CollectionTOCConstants.scala (+2 -0)

@@ -67,6 +67,8 @@ object CollectionTOCConstants {
   val TERMS = "terms"
   val TOC_URL = "tocUrl"
   val TTL = "ttl"
+  val COLLECTION_UNIT_FIELD_MAX_LENGTH = "collection.csv.maxUnitFieldLength"
+  val COLLECTION_DESC_FIELD_MAX_LENGTH = "collection.csv.maxDescFieldLength"
   val LEARNING_SERVICE_BASE_URL = "learning_service.api.base_url"
   val SUNBIRD_AUTHORIZATION = "learning_service.api.auth_key"
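The two new constants are configuration key paths rather than values. A minimal sketch of resolving them with a plain Typesafe Config lookup, assuming application.conf is on the classpath (the platform's own config helper is not shown on this page):

// Illustrative sketch only; the service's real lookup goes through its own config utility.
import com.typesafe.config.{Config, ConfigFactory}

object CsvLimitsSketch {
  private val config: Config = ConfigFactory.load()  // loads application.conf from the classpath

  // Key paths mirror the constants added to CollectionTOCConstants.
  val maxUnitFieldLength: Int = config.getInt("collection.csv.maxUnitFieldLength")  // 50 in the new conf below
  val maxDescFieldLength: Int = config.getInt("collection.csv.maxDescFieldLength")  // 250 in the new conf below
}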
content-api/collection-csv-actors/src/main/scala/org.sunbird/collectioncsv/validator/CollectionCSVValidator.scala (+62 -70)

(This file's diff is collapsed on the page and not shown.)
content-api/collection-csv-actors/src/test/resources/MaxDescriptionLength.csv (new file, +13 -0)
"Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder","Description"
"5. Human Body",,,,"This chapter describes about human body"
"5. Human Body","5.1 Parts of Body",,,"This section describes about various part of the body such as head, hands, legs etc.
This section describes about various part of the body such as head, hands, legs etc.
This section describes about various part of the body such as head, hands, legs etc.
This section describes about various part of the body such as head, hands, legs etc."
"5. Human Body","5.1 Parts of Body","5.1.1 Key parts in the head",,"xyz"
"5. Human Body","5.1 Parts of Body","5.1.2 Other parts",,
"5. Human Body","5.2 Organ Systems",,,
"5. Human Body","5.2 Organ Systems","5.2.1 Respiratory System","dsffgdg",
(The remaining rows of this fixture are collapsed on the page and not shown.)
content-api/collection-csv-actors/src/test/resources/MaxUnitNameLength.csv (new file, +7 -0)
"Level 1 Folder","Level 2 Folder","Level 3 Folder","Level 4 Folder","Description"
"5. Human Body",,,,"This chapter describes about human body"
"5. Human Body","5.1 Parts of Body",,,"This section describes about various part of the body such as head, hands, legs etc."
"5. Human Body","5.1 Parts of Body","5.1.1 Key parts in the head trying to exceed maximum Unit folder data length to verify the error message",,"xyz"
"5. Human Body","5.1 Parts of Body","5.1.2 Other parts",,
"5. Human Body","5.2 Organ Systems",,,
"5. Human Body","5.2 Organ Systems","5.2.1 Respiratory System","dsffgdg",
content-api/collection-csv-actors/src/test/resources/application.conf (+2 -0)

@@ -86,6 +86,8 @@ collection {
   csv {
     maxRows = 6500
     maxFirstLevelUnits = 30
+    maxUnitFieldLength = 50
+    maxDescFieldLength = 250
     ttl = 86400
     contentTypeToUnitType = {"TextBook": "TextBookUnit", "Course": "CourseUnit", "Collection": "CollectionUnit"}
     headers {
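With these limits configured, the validator (CollectionCSVValidator.scala, whose diff is collapsed above) can reject rows whose unit folder names or Description exceed the allowed lengths, which is what the new MaxUnitNameLength.csv and MaxDescriptionLength.csv fixtures exercise. A rough sketch of such a check, with hypothetical names and assuming each CSV row is a header-to-value map:

// Hypothetical sketch; the project's actual validation is not shown on this page.
object FieldLengthCheckSketch {
  def oversizedFields(rows: List[Map[String, String]],
                      maxUnitFieldLength: Int = 50,
                      maxDescFieldLength: Int = 250): List[String] =
    rows.zipWithIndex.flatMap { case (row, idx) =>
      val longUnit = row.exists { case (header, value) =>
        header.contains("Folder") && value.trim.length > maxUnitFieldLength
      }
      val longDesc = row.get("Description").exists(_.trim.length > maxDescFieldLength)
      // idx + 2 accounts for the header row and 1-based numbering in error messages.
      if (longUnit || longDesc) Some(s"Row ${idx + 2}: field exceeds configured length") else None
    }
}

A non-empty result would surface as the CSV_INVALID_FIELDS_LENGTH client error asserted in the new TestCollectionCSVActor cases.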
content-api/collection-csv-actors/src/test/scala/org/sunbird/collectioncsv/TestCollectionCSVActor.scala (+36 -22)

@@ -94,7 +94,7 @@ class TestCollectionCSVActor extends FlatSpec with Matchers with MockFactory {
     assert(response.getParams.getErr.equalsIgnoreCase("COLLECTION_CHILDREN_EXISTS"))
   }
-  it should "return client error on input of create csv with missing column and additional column" in {
+  it should "return client error on input of create csv with invalid sequence" in {
     (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
     val collectionID = "do_113293355858984960134"
     val node = createNode()

@@ -103,11 +103,11 @@ class TestCollectionCSVActor extends FlatSpec with Matchers with MockFactory {
     (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(getNodes(node))).anyNumberOfTimes()
     (graphDB.readExternalProps(_: Request, _: List[String])).expects(*, *).returns(Future(getEmptyCassandraHierarchy())).anyNumberOfTimes()
-    val response = uploadFileToActor(collectionID, resourceDirectory + "InvalidHeadersFound.csv")
+    val response = uploadFileToActor(collectionID, resourceDirectory + "InvalidHeaderSequence.csv")
     assert(response != null)
     println("TestCollectionCSVActor --> response.getParams: " + response.getParams)
     assert(response.getResponseCode == ResponseCode.CLIENT_ERROR)
-    assert(response.getParams.getErr.equalsIgnoreCase("INVALID_HEADERS_FOUND"))
+    assert(response.getParams.getErr.equalsIgnoreCase("INVALID_HEADER_SEQUENCE"))
   }
   it should "return client error on input of create csv with missing column" in {

@@ -123,7 +123,7 @@ class TestCollectionCSVActor extends FlatSpec with Matchers with MockFactory {
     assert(response != null)
     println("TestCollectionCSVActor --> response.getParams: " + response.getParams)
     assert(response.getResponseCode == ResponseCode.CLIENT_ERROR)
-    assert(response.getParams.getErr.equalsIgnoreCase("REQUIRED_HEADER_MISSING") || response.getParams.getErr.equalsIgnoreCase("INVALID_HEADER_SEQUENCE"))
+    assert(response.getParams.getErr.equalsIgnoreCase("REQUIRED_HEADER_MISSING") || response.getParams.getErr.equalsIgnoreCase("MISSING_HEADERS"))
   }
   it should "return client error on input of create csv with additional column" in {

@@ -206,6 +206,38 @@ class TestCollectionCSVActor extends FlatSpec with Matchers with MockFactory {
     assert(response.getParams.getErr.equalsIgnoreCase("DUPLICATE_ROWS"))
   }
+  it should "return client error on input of create csv with unit field data exceeding maximum length" in {
+    (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
+    val collectionID = "do_113293355858984960134"
+    val node = createNode()
+    (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes()
+    (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes()
+    (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(getNodes(node))).anyNumberOfTimes()
+    (graphDB.readExternalProps(_: Request, _: List[String])).expects(*, *).returns(Future(getEmptyCassandraHierarchy())).anyNumberOfTimes()
+    val response = uploadFileToActor(collectionID, resourceDirectory + "MaxUnitNameLength.csv")
+    assert(response != null)
+    println("TestCollectionCSVActor --> response.getParams: " + response.getParams)
+    assert(response.getResponseCode == ResponseCode.CLIENT_ERROR)
+    assert(response.getParams.getErr.equalsIgnoreCase("CSV_INVALID_FIELDS_LENGTH"))
+  }
+
+  it should "return client error on input of create csv with Description field data exceeding maximum length" in {
+    (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
+    val collectionID = "do_113293355858984960134"
+    val node = createNode()
+    (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes()
+    (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes()
+    (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(getNodes(node))).anyNumberOfTimes()
+    (graphDB.readExternalProps(_: Request, _: List[String])).expects(*, *).returns(Future(getEmptyCassandraHierarchy())).anyNumberOfTimes()
+    val response = uploadFileToActor(collectionID, resourceDirectory + "MaxDescriptionLength.csv")
+    assert(response != null)
+    println("TestCollectionCSVActor --> response.getParams: " + response.getParams)
+    assert(response.getResponseCode == ResponseCode.CLIENT_ERROR)
+    assert(response.getParams.getErr.equalsIgnoreCase("CSV_INVALID_FIELDS_LENGTH"))
+  }
+
   it should "return client error on input of update csv with invalid QRCodeRequired and QRCode combination" in {
     (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
     val collectionID = "do_1132828073514926081518"

@@ -341,24 +373,6 @@ class TestCollectionCSVActor extends FlatSpec with Matchers with MockFactory {
     assert(response.getParams.getErr.equalsIgnoreCase("CSV_INVALID_LINKED_CONTENTS"))
   }
-  it should "return client error on input of update csv with invalid contentType of linked contents" in {
-    (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
-    val collectionID = "do_1132828073514926081518"
-    val node = updateNode()
-    (graphDB.upsertNode(_: String, _: Node, _: Request)).expects(*, *, *).returns(Future(node)).anyNumberOfTimes()
-    (graphDB.getNodeByUniqueId(_: String, _: String, _: Boolean, _: Request)).expects(*, *, *, *).returns(Future(node)).anyNumberOfTimes()
-    (graphDB.getNodeByUniqueIds(_: String, _: SearchCriteria)).expects(*, *).returns(Future(getNodes(node))).anyNumberOfTimes()
-    (graphDB.readExternalProps(_: Request, _: List[String])).expects(*, *).returns(Future(getCassandraHierarchy())).anyNumberOfTimes()
-    (oec.httpUtil _).expects().returns(httpUtil)
-    (httpUtil.post(_: String, _: java.util.Map[String, AnyRef], _: java.util.Map[String, String])).expects(*, *, *).returns(linkedContentsInvalidContentTypeResponse()).anyNumberOfTimes()
-    val response = uploadFileToActor(collectionID, resourceDirectory + "InvalidLinkedContentContentType.csv")
-    assert(response != null)
-    println("TestCollectionCSVActor --> response.getParams: " + response.getParams)
-    assert(response.getResponseCode == ResponseCode.CLIENT_ERROR)
-    assert(response.getParams.getErr.equalsIgnoreCase("CSV_INVALID_LINKED_CONTENTS_CONTENT_TYPE"))
-  }
   it should "return success response on input of valid update TOC csv" in {
     (oec.graphService _).expects().returns(graphDB).anyNumberOfTimes()
     val collectionID = "do_1132828073514926081518"
content-api/content-service/conf/application.conf (+2 -0)

@@ -712,6 +712,8 @@ collection {
   csv {
     maxRows = 6500
     maxFirstLevelUnits = 30
+    maxUnitFieldLength = 50
+    maxDescFieldLength = 250
     ttl = 86400
     contentTypeToUnitType = {"TextBook": "TextBookUnit", "Course": "CourseUnit", "Collection": "CollectionUnit"}
     headers {